/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "disas/disas.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "sysemu/kvm.h"
#include "exec/semihost.h"

#include "trace-tcg.h"
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
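
/*
 * For illustration (not referenced by the code): the word 0x8c430004
 * encodes "lw v1, 4(v0)".  MASK_OP_MAJOR() keeps only bits 31..26, giving
 * (0x23 << 26), i.e. OPC_LW in the table below, which is all the top-level
 * decoder needs in order to dispatch on the major opcode.
 */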
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))

enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC   = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC   = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC  = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)
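
/*
 * For illustration (not referenced by the code): the all-zero word
 * 0x00000000 is NOP, i.e. "sll r0, r0, 0".  MASK_SPECIAL() keeps the major
 * opcode plus the low six function bits, so it maps that word to
 * 0x00 | OPC_SPECIAL == OPC_SLL in the table below.
 */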
enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0 */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,

    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */

    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ   = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ   = 0x37 | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* R6 Multiply and Divide instructions have the same opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op)   (MASK_SPECIAL(op) | (op & (0x7ff)))
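
/*
 * For illustration: legacy MULT and Release 6 MUL share opcode 0x00 and
 * function field 0x18; they are told apart by the shift-amount field
 * (bits 10..6), which MASK_R6_MULDIV() folds in via the extra
 * (op & 0x7ff) term.  Legacy "mult rs, rt" has sa == 0, while R6
 * "mul rd, rs, rt" encodes sa == 2, matching R6_OPC_MUL below.
 */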
enum {
    R6_OPC_MUL    = OPC_MULT   | (2 << 6),
    R6_OPC_MUH    = OPC_MULT   | (3 << 6),
    R6_OPC_MULU   = OPC_MULTU  | (2 << 6),
    R6_OPC_MUHU   = OPC_MULTU  | (3 << 6),
    R6_OPC_DIV    = OPC_DIV    | (2 << 6),
    R6_OPC_MOD    = OPC_DIV    | (3 << 6),
    R6_OPC_DIVU   = OPC_DIVU   | (2 << 6),
    R6_OPC_MODU   = OPC_DIVU   | (3 << 6),

    R6_OPC_DMUL   = OPC_DMULT  | (2 << 6),
    R6_OPC_DMUH   = OPC_DMULT  | (3 << 6),
    R6_OPC_DMULU  = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU  = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV   = OPC_DDIV   | (2 << 6),
    R6_OPC_DMOD   = OPC_DDIV   | (3 << 6),
    R6_OPC_DDIVU  = OPC_DDIVU  | (2 << 6),
    R6_OPC_DMODU  = OPC_DDIVU  | (3 << 6),

    R6_OPC_CLZ    = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO    = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ   = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO   = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP  = 0x0e | OPC_SPECIAL,

    OPC_LSA       = 0x05 | OPC_SPECIAL,
    OPC_DLSA      = 0x15 | OPC_SPECIAL,
};
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ    = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL   = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ    = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL   = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL  = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL  = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI    = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU   = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI    = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU   = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI    = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI    = (0x0E << 16) | OPC_REGIMM,
    OPC_SYNCI   = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI    = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI    = (0x1e << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD        = 0x00 | OPC_SPECIAL2,
    OPC_MADDU       = 0x01 | OPC_SPECIAL2,
    OPC_MUL         = 0x02 | OPC_SPECIAL2,
    OPC_MSUB        = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU       = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,

    OPC_CLZ         = 0x20 | OPC_SPECIAL2,
    OPC_CLO         = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ        = 0x24 | OPC_SPECIAL2,
    OPC_DCLO        = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP       = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT         = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM       = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU       = 0x02 | OPC_SPECIAL3,
    OPC_DEXT        = 0x03 | OPC_SPECIAL3,
    OPC_INS         = 0x04 | OPC_SPECIAL3,
    OPC_DINSM       = 0x05 | OPC_SPECIAL3,
    OPC_DINSU       = 0x06 | OPC_SPECIAL3,
    OPC_DINS        = 0x07 | OPC_SPECIAL3,
    OPC_FORK        = 0x08 | OPC_SPECIAL3,
    OPC_YIELD       = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL       = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL      = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR       = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP         = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP    = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP    = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP  = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP  = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP    = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP    = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP   = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP  = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP       = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP      = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP     = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP    = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP     = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP    = 0x3C | OPC_SPECIAL3,

    R6_OPC_PREF        = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE       = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL          = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC          = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD         = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD         = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH      = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB       = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH       = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN     = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP   = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};
#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH       = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD       = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN     = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP   = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};
/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX  = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX  = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX  = (0x08 << 6) | OPC_LX_DSP,
};
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH        = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH      = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W       = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB        = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB      = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH        = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH      = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH        = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH      = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W       = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB        = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB      = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH        = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH      = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC          = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC          = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB         = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB     = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH     = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL  = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR  = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH      = (0x1E << 6) | OPC_ADDU_QB_DSP,
};
#define OPC_ADDUH_QB_DSP   OPC_MULT_G_2E
#define MASK_ADDUH_QB(op)  (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB   = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH   = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W    = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W  = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB   = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH   = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W    = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W  = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH     = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH   = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W   = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W  = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB       = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH       = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W        = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL    = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR    = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL  = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR  = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL   = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR   = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA  = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA  = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV          = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB         = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB        = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH         = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH        = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH      = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH     = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W   = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W      = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W   = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH  = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB       = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB       = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB       = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB      = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB      = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB      = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB     = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB     = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB     = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH        = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH        = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH        = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB          = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH          = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH        = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB    = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB   = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH    = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH   = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH  = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W   = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W  = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB    = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB   = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH    = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH   = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB    = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB  = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB   = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH    = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH   = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH  = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W   = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W  = (0x17 << 6) | OPC_SHLL_QB_DSP,
};
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL    = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR    = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL    = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR    = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH      = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH     = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH   = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH  = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH      = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH     = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH   = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH  = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W   = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W   = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL   = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR   = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL  = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR  = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH    = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};
#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND  = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN  = (0x10 << 6) | OPC_APPEND_DSP,
};
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W     = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W   = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W  = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H   = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H  = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W    = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W  = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP       = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV      = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP     = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV    = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO      = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV     = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP     = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP      = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP      = (0x12 << 6) | OPC_EXTR_W_DSP,
};
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL    = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR    = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL   = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR   = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA  = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA  = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL  = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR  = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL   = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR   = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA  = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA  = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB       = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW       = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH       = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB         = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW         = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH         = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB        = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW        = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH        = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH     = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB     = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW        = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW      = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH        = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH      = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB        = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB      = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH        = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH      = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB       = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB     = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW        = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW      = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH        = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH      = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB        = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB      = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH        = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH      = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB       = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB     = (0x1A << 6) | OPC_ADDU_OB_DSP,
};
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW         = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW         = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW         = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH         = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH         = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH         = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB      = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB      = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB      = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB       = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB       = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB       = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB        = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB        = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB        = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW         = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB           = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW           = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH           = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH       = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW   = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH      = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L       = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW      = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW   = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH   = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND  = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN  = (0x10 << 6) | OPC_DAPPEND_DSP,
};
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP     = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO      = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP       = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP     = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV    = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV      = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L     = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L   = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L  = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W     = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W   = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W  = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H   = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L    = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L  = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H  = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W    = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W  = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV     = (0x1B << 6) | OPC_DEXTR_W_DSP,
};
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD         = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU        = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB         = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU        = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH      = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH   = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW  = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL    = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR    = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH      = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH   = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW  = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL    = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR    = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL   = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR   = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL  = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR  = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL  = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR  = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};
#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW    = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW  = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB   = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW   = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH   = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW    = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW  = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB   = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW   = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH   = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB   = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH   = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB    = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH    = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH  = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB    = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB  = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH    = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH  = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB    = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH    = (0x19 << 6) | OPC_SHLL_OB_DSP,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))
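
/*
 * For illustration: "mfc0 k0, c0_status" assembles to 0x401a6000; masking
 * with MASK_CP0() keeps the major opcode and the rs field (bits 25..21),
 * giving (0x00 << 21) | OPC_CP0 == OPC_MFC0 regardless of which CP0
 * register is selected by the rd/sel fields.
 */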
enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0    = (0x02 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0    = (0x06 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};
#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)
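
/*
 * For illustration: the canonical "di" encoding 0x41606000 (rs == MFMC0,
 * rd == 12/Status, sc bit clear) satisfies MASK_MFMC0(op) == OPC_DI below,
 * and "ei" (0x41606020, sc bit set) matches OPC_EI; the sc bit at position
 * 5 is what separates the disable/enable pairs in this table.
 */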
enum {
    OPC_DMT  = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT  = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI   = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI   = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR    = 0x01 | OPC_C0,
    OPC_TLBWI   = 0x02 | OPC_C0,
    OPC_TLBINV  = 0x03 | OPC_C0,
    OPC_TLBINVF = 0x04 | OPC_C0,
    OPC_TLBWR   = 0x06 | OPC_C0,
    OPC_TLBP    = 0x08 | OPC_C0,
    OPC_RFE     = 0x10 | OPC_C0,
    OPC_ERET    = 0x18 | OPC_C0,
    OPC_DERET   = 0x1F | OPC_C0,
    OPC_WAIT    = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16,  /* single fp */
    FMT_D  = 17,  /* double fp */
    FMT_E  = 18,  /* extended fp */
    FMT_Q  = 19,  /* quad fp */
    FMT_W  = 20,  /* 32-bit fixed */
    FMT_L  = 21,  /* 64-bit fixed */
    FMT_PS = 22,  /* paired single fp */
    /* 23 - 31 are reserved */
};
enum {
    OPC_MFC1    = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1   = (0x01 << 21) | OPC_CP1,
    OPC_CFC1    = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1   = (0x03 << 21) | OPC_CP1,
    OPC_MTC1    = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1   = (0x05 << 21) | OPC_CP1,
    OPC_CTC1    = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1   = (0x07 << 21) | OPC_CP1,
    OPC_BC1     = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V    = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V   = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT   = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT   = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT   = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT   = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT   = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT   = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT  = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ  = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ  = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B    = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H    = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W    = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D    = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B   = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H   = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W   = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D   = (0x1F << 21) | OPC_CP1,
};
#define MASK_CP1_FUNC(op)  MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)       MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F  = (0x00 << 16) | OPC_BC1,
    OPC_BC1T  = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2   = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2  = (0x01 << 21) | OPC_CP2,
    OPC_CFC2   = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2  = (0x03 << 21) | OPC_CP2,
    OPC_MTC2   = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2  = (0x05 << 21) | OPC_CP2,
    OPC_CTC2   = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2  = (0x07 << 21) | OPC_CP2,
    OPC_BC2    = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ = (0x0D << 21) | OPC_CP2,
};
#define MASK_LMI(op)  (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH    = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH   = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH     = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW     = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB    = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB   = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB     = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD     = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH    = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH   = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH     = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW     = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB    = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB   = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB     = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD     = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH    = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH  = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB  = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB  = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2   = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2   = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2   = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN     = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0  = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1  = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2  = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3  = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH     = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB     = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH    = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH    = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB    = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB    = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW   = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW   = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH   = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH   = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB   = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB   = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW     = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH     = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH    = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH    = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW    = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH   = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW     = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH     = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW     = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH     = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2  = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2    = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2   = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2  = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2  = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2   = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2  = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB   = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2   = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2  = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2  = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2   = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2   = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2  = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH    = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW   = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2  = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2   = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2   = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2  = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2   = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2  = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD     = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB  = (29 << 21) | (0x0F) | OPC_CP2,
};
#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
#define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))

enum {
    OPC_MSA_I8_00  = 0x00 | OPC_MSA,
    OPC_MSA_I8_01  = 0x01 | OPC_MSA,
    OPC_MSA_I8_02  = 0x02 | OPC_MSA,
    OPC_MSA_I5_06  = 0x06 | OPC_MSA,
    OPC_MSA_I5_07  = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09 = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D  = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E  = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F  = 0x0F | OPC_MSA,
    OPC_MSA_3R_10  = 0x10 | OPC_MSA,
    OPC_MSA_3R_11  = 0x11 | OPC_MSA,
    OPC_MSA_3R_12  = 0x12 | OPC_MSA,
    OPC_MSA_3R_13  = 0x13 | OPC_MSA,
    OPC_MSA_3R_14  = 0x14 | OPC_MSA,
    OPC_MSA_3R_15  = 0x15 | OPC_MSA,
    OPC_MSA_ELM    = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C = 0x1C | OPC_MSA,
    OPC_MSA_VEC    = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B = (0x20) | OPC_MSA,
    OPC_LD_H = (0x21) | OPC_MSA,
    OPC_LD_W = (0x22) | OPC_MSA,
    OPC_LD_D = (0x23) | OPC_MSA,
    OPC_ST_B = (0x24) | OPC_MSA,
    OPC_ST_H = (0x25) | OPC_MSA,
    OPC_ST_W = (0x26) | OPC_MSA,
    OPC_ST_D = (0x27) | OPC_MSA,
};
enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df    = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df     = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df    = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df   = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df   = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df   = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df   = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df   = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df   = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df   = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df   = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df      = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B      = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B     = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B       = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B       = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B      = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H       = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B      = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B     = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W       = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B      = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V       = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V        = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V       = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V       = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V      = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V       = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V      = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R      = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF     = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df     = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df     = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df     = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df     = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df   = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df    = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df   = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df     = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df    = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df    = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df   = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df   = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df     = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df     = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df  = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df  = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df  = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df  = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df      = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df     = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df      = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df    = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df   = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df     = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df   = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df      = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df     = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df      = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df     = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df   = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df   = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df    = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df   = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df    = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df     = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df      = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df    = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df    = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df   = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df    = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df  = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df    = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df     = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df     = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df    = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df    = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df   = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df  = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df    = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df     = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df    = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df    = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df    = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df   = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df    = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df  = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df     = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df   = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df     = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df    = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df    = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df    = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df   = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df    = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df  = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df     = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df   = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df    = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df    = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df   = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df    = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df    = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df   = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df    = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df    = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df   = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df    = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df    = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df   = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df     = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA      = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df   = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA      = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df   = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V      = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df   = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df   = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df    = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df     = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df     = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df     = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df     = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df     = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df     = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df     = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df    = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df    = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df     = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df     = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df     = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df    = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df    = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df    = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df    = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df   = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df     = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df   = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df    = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df    = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df     = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df    = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df     = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df     = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df     = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df      = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df    = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df    = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df     = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df     = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df     = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df   = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df    = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df   = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df  = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df     = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df     = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df  = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df    = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df   = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df     = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df    = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df     = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df    = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df     = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df    = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df    = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df    = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df    = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df    = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df   = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df   = (0x7 << 23) | OPC_MSA_BIT_09,
};
/* global register indices */
static TCGv_ptr cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];
#include "exec/gen-icount.h"
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
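
/*
 * For reference, a call used later in this file such as
 *     gen_helper_0e0i(raise_exception, excp);
 * expands to
 *     do {
 *         TCGv_i32 helper_tmp = tcg_const_i32(excp);
 *         gen_helper_raise_exception(cpu_env, helper_tmp);
 *         tcg_temp_free_i32(helper_tmp);
 *     } while (0);
 * i.e. the constant operand is wrapped in a throw-away TCGv_i32 so every
 * helper invocation only ever passes TCG values, and the temporary is
 * released immediately after the call is emitted.
 */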
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    int mem_idx;
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}
static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
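/*
 * PC and hflags are synchronised lazily: save_cpu_state() only emits the
 * store when the translation-time copy has diverged from what was last
 * written to the CPU state, which keeps exception paths correct without
 * flushing PC/hflags on every instruction.
 */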
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
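/*
 * With Status.FR == 0 (no MIPS_HFLAG_F64), a 64-bit FPR is modelled as an
 * even/odd pair of 32-bit halves held in fpu_f64[]: a 64-bit access to
 * register "reg" combines the low words of fpu_f64[reg & ~1] and
 * fpu_f64[reg | 1], which is why the helpers above deposit into bits 0..31
 * of each element instead of overwriting the whole 64-bit value.
 */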
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}

/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}
/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}
/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit.
 */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}
/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has the corresponding flag set, which indicates that the
   instruction has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the 64-bit paired-single (PS) floating point
   data type. */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use. */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_ps(ctx);                                                        \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break;\
    case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break;\
    case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break;\
    case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break;\
    case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break;\
    case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break;\
    case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break;\
    case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break;\
    case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break;\
    case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);  break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);  break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
    default: abort();                                                         \
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}

FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#undef FOP_CONDS
#define FOP_CONDNS(fmt, ifmt, bits, STORE)                                    \
static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n,              \
                                      int ft, int fs, int fd)                 \
{                                                                             \
    TCGv_i ## bits fp0 = tcg_temp_new_i ## bits();                            \
    TCGv_i ## bits fp1 = tcg_temp_new_i ## bits();                            \
    if (ifmt == FMT_D) {                                                      \
        check_cp1_registers(ctx, fs | ft | fd);                               \
    }                                                                         \
    gen_ldcmp_fpr ## bits(ctx, fp0, fs);                                      \
    gen_ldcmp_fpr ## bits(ctx, fp1, ft);                                      \
    switch (n) {                                                              \
    case  0: gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1);   break;\
    case  1: gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1);   break;\
    case  2: gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1);   break;\
    case  3: gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1);  break;\
    case  4: gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1);   break;\
    case  5: gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1);  break;\
    case  6: gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1);   break;\
    case  7: gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1);  break;\
    case  8: gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1);  break;\
    case  9: gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1);  break;\
    case 10: gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1);  break;\
    case 11: gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); break;\
    case 12: gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1);  break;\
    case 13: gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); break;\
    case 14: gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1);  break;\
    case 15: gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); break;\
    case 17: gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1);   break;\
    case 18: gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1);  break;\
    case 19: gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1);   break;\
    case 25: gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1);  break;\
    case 26: gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); break;\
    case 27: gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1);  break;\
    default: abort();                                                         \
    }                                                                         \
    STORE;                                                                    \
    tcg_temp_free_i ## bits (fp0);                                            \
    tcg_temp_free_i ## bits (fp1);                                            \
}

FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))
#undef FOP_CONDNS
#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
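/*
 * Unlike the pre-R6 c.cond.fmt comparisons above, which record their result
 * in an FCC condition code selected by "cc", the R6 cmp.cond.fmt helpers
 * return an all-ones or all-zeros mask in fp0, which the STORE argument of
 * FOP_CONDNS then writes back to FPR fd.
 */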
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx);                        \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#undef OP_LD_ATOMIC

#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    TCGLabel *l1 = gen_new_label();                                          \
    TCGLabel *l2 = gen_new_label();                                          \
                                                                             \
    tcg_gen_andi_tl(t0, arg2, almask);                                       \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                              \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));      \
    generate_exception(ctx, EXCP_AdES);                                      \
    gen_set_label(l1);                                                       \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));              \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                            \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                        \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));               \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));          \
    generate_exception_end(ctx, EXCP_SC);                                    \
    gen_set_label(l2);                                                       \
    tcg_gen_movi_tl(t0, 0);                                                  \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx);                     \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
#undef OP_ST_ATOMIC
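/*
 * In the user-only build, SC cannot be emulated atomically inside the TB:
 * the store variant above checks alignment, records llreg/llnewval in the
 * CPU state and raises EXCP_SC when the address still matches lladdr, so
 * the user-mode exception loop can perform the compare-and-store outside
 * translated code.  The system-mode variant simply defers to the sc/scd
 * helpers.
 */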
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
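/*
 * The LWL/LWR and LDL/LDR cases in gen_ld() below emulate the unaligned
 * left/right loads with aligned accesses: a dummy byte load first forces
 * any page fault for the unaligned address, then the aligned word
 * containing it is loaded and shifted according to the byte offset (XORed
 * with the offset mask on the opposite endianness), and finally merged
 * into the old register value under a mask built by shifting -1 (or ~1 for
 * the "right" forms) by the same amount.
 */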
2110 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2111 int rt
, int base
, int16_t offset
)
2115 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
        /* Loongson CPUs use a load to the zero register as a prefetch hint.
           We emulate it as a NOP.  On other CPUs we must still perform the
           actual memory access. */
2122 t0
= tcg_temp_new();
2123 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2126 #if defined(TARGET_MIPS64)
2128 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2129 ctx
->default_tcg_memop_mask
);
2130 gen_store_gpr(t0
, rt
);
2133 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2134 ctx
->default_tcg_memop_mask
);
2135 gen_store_gpr(t0
, rt
);
2139 op_ld_lld(t0
, t0
, ctx
);
2140 gen_store_gpr(t0
, rt
);
2143 t1
= tcg_temp_new();
2144 /* Do a byte access to possibly trigger a page
2145 fault with the unaligned address. */
2146 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2147 tcg_gen_andi_tl(t1
, t0
, 7);
2148 #ifndef TARGET_WORDS_BIGENDIAN
2149 tcg_gen_xori_tl(t1
, t1
, 7);
2151 tcg_gen_shli_tl(t1
, t1
, 3);
2152 tcg_gen_andi_tl(t0
, t0
, ~7);
2153 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2154 tcg_gen_shl_tl(t0
, t0
, t1
);
2155 t2
= tcg_const_tl(-1);
2156 tcg_gen_shl_tl(t2
, t2
, t1
);
2157 gen_load_gpr(t1
, rt
);
2158 tcg_gen_andc_tl(t1
, t1
, t2
);
2160 tcg_gen_or_tl(t0
, t0
, t1
);
2162 gen_store_gpr(t0
, rt
);
2165 t1
= tcg_temp_new();
2166 /* Do a byte access to possibly trigger a page
2167 fault with the unaligned address. */
2168 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2169 tcg_gen_andi_tl(t1
, t0
, 7);
2170 #ifdef TARGET_WORDS_BIGENDIAN
2171 tcg_gen_xori_tl(t1
, t1
, 7);
2173 tcg_gen_shli_tl(t1
, t1
, 3);
2174 tcg_gen_andi_tl(t0
, t0
, ~7);
2175 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2176 tcg_gen_shr_tl(t0
, t0
, t1
);
2177 tcg_gen_xori_tl(t1
, t1
, 63);
2178 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2179 tcg_gen_shl_tl(t2
, t2
, t1
);
2180 gen_load_gpr(t1
, rt
);
2181 tcg_gen_and_tl(t1
, t1
, t2
);
2183 tcg_gen_or_tl(t0
, t0
, t1
);
2185 gen_store_gpr(t0
, rt
);
2188 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2189 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2191 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2192 gen_store_gpr(t0
, rt
);
2196 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2197 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2199 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2200 gen_store_gpr(t0
, rt
);
2203 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2204 ctx
->default_tcg_memop_mask
);
2205 gen_store_gpr(t0
, rt
);
2208 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2209 ctx
->default_tcg_memop_mask
);
2210 gen_store_gpr(t0
, rt
);
2213 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2214 ctx
->default_tcg_memop_mask
);
2215 gen_store_gpr(t0
, rt
);
2218 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2219 gen_store_gpr(t0
, rt
);
2222 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2223 gen_store_gpr(t0
, rt
);
2226 t1
= tcg_temp_new();
2227 /* Do a byte access to possibly trigger a page
2228 fault with the unaligned address. */
2229 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2230 tcg_gen_andi_tl(t1
, t0
, 3);
2231 #ifndef TARGET_WORDS_BIGENDIAN
2232 tcg_gen_xori_tl(t1
, t1
, 3);
2234 tcg_gen_shli_tl(t1
, t1
, 3);
2235 tcg_gen_andi_tl(t0
, t0
, ~3);
2236 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2237 tcg_gen_shl_tl(t0
, t0
, t1
);
2238 t2
= tcg_const_tl(-1);
2239 tcg_gen_shl_tl(t2
, t2
, t1
);
2240 gen_load_gpr(t1
, rt
);
2241 tcg_gen_andc_tl(t1
, t1
, t2
);
2243 tcg_gen_or_tl(t0
, t0
, t1
);
2245 tcg_gen_ext32s_tl(t0
, t0
);
2246 gen_store_gpr(t0
, rt
);
2249 t1
= tcg_temp_new();
2250 /* Do a byte access to possibly trigger a page
2251 fault with the unaligned address. */
2252 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2253 tcg_gen_andi_tl(t1
, t0
, 3);
2254 #ifdef TARGET_WORDS_BIGENDIAN
2255 tcg_gen_xori_tl(t1
, t1
, 3);
2257 tcg_gen_shli_tl(t1
, t1
, 3);
2258 tcg_gen_andi_tl(t0
, t0
, ~3);
2259 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2260 tcg_gen_shr_tl(t0
, t0
, t1
);
2261 tcg_gen_xori_tl(t1
, t1
, 31);
2262 t2
= tcg_const_tl(0xfffffffeull
);
2263 tcg_gen_shl_tl(t2
, t2
, t1
);
2264 gen_load_gpr(t1
, rt
);
2265 tcg_gen_and_tl(t1
, t1
, t2
);
2267 tcg_gen_or_tl(t0
, t0
, t1
);
2269 tcg_gen_ext32s_tl(t0
, t0
);
2270 gen_store_gpr(t0
, rt
);
2274 op_ld_ll(t0
, t0
, ctx
);
2275 gen_store_gpr(t0
, rt
);
2282 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2283 int base
, int16_t offset
)
2285 TCGv t0
= tcg_temp_new();
2286 TCGv t1
= tcg_temp_new();
2288 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2289 gen_load_gpr(t1
, rt
);
2291 #if defined(TARGET_MIPS64)
2293 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2294 ctx
->default_tcg_memop_mask
);
2297 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2300 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2304 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2305 ctx
->default_tcg_memop_mask
);
2308 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2309 ctx
->default_tcg_memop_mask
);
2312 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2315 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2318 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2326 /* Store conditional */
2327 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2328 int base
, int16_t offset
)
2332 #ifdef CONFIG_USER_ONLY
2333 t0
= tcg_temp_local_new();
2334 t1
= tcg_temp_local_new();
2336 t0
= tcg_temp_new();
2337 t1
= tcg_temp_new();
2339 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2340 gen_load_gpr(t1
, rt
);
2342 #if defined(TARGET_MIPS64)
2345 op_st_scd(t1
, t0
, rt
, ctx
);
2350 op_st_sc(t1
, t0
, rt
, ctx
);
2357 /* Load and store */
2358 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2359 int base
, int16_t offset
)
2361 TCGv t0
= tcg_temp_new();
2363 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2364 /* Don't do NOP if destination is zero: we must perform the actual
2369 TCGv_i32 fp0
= tcg_temp_new_i32();
2370 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2371 ctx
->default_tcg_memop_mask
);
2372 gen_store_fpr32(ctx
, fp0
, ft
);
2373 tcg_temp_free_i32(fp0
);
2378 TCGv_i32 fp0
= tcg_temp_new_i32();
2379 gen_load_fpr32(ctx
, fp0
, ft
);
2380 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2381 ctx
->default_tcg_memop_mask
);
2382 tcg_temp_free_i32(fp0
);
2387 TCGv_i64 fp0
= tcg_temp_new_i64();
2388 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2389 ctx
->default_tcg_memop_mask
);
2390 gen_store_fpr64(ctx
, fp0
, ft
);
2391 tcg_temp_free_i64(fp0
);
2396 TCGv_i64 fp0
= tcg_temp_new_i64();
2397 gen_load_fpr64(ctx
, fp0
, ft
);
2398 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2399 ctx
->default_tcg_memop_mask
);
2400 tcg_temp_free_i64(fp0
);
2404 MIPS_INVAL("flt_ldst");
2405 generate_exception_end(ctx
, EXCP_RI
);
2412 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2413 int rs
, int16_t imm
)
2415 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2416 check_cp1_enabled(ctx
);
2420 check_insn(ctx
, ISA_MIPS2
);
2423 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2426 generate_exception_err(ctx
, EXCP_CpU
, 1);
2430 /* Arithmetic with immediate operand */
2431 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2432 int rt
, int rs
, int16_t imm
)
2434 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2436 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2437 /* If no destination, treat it as a NOP.
2438 For addi, we must generate the overflow exception when needed. */
2444 TCGv t0
= tcg_temp_local_new();
2445 TCGv t1
= tcg_temp_new();
2446 TCGv t2
= tcg_temp_new();
2447 TCGLabel
*l1
= gen_new_label();
2449 gen_load_gpr(t1
, rs
);
2450 tcg_gen_addi_tl(t0
, t1
, uimm
);
2451 tcg_gen_ext32s_tl(t0
, t0
);
2453 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2454 tcg_gen_xori_tl(t2
, t0
, uimm
);
2455 tcg_gen_and_tl(t1
, t1
, t2
);
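        /* Overflow occurs iff the operands have the same sign and the result
           has a different sign: with r = rs + imm, that is the sign bit of
           (rs ^ ~imm) & (r ^ imm), tested below by branching when t1 >= 0
           (no overflow). */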
2457 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2459 /* operands of same sign, result different sign */
2460 generate_exception(ctx
, EXCP_OVERFLOW
);
2462 tcg_gen_ext32s_tl(t0
, t0
);
2463 gen_store_gpr(t0
, rt
);
2469 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2470 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2472 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2475 #if defined(TARGET_MIPS64)
2478 TCGv t0
= tcg_temp_local_new();
2479 TCGv t1
= tcg_temp_new();
2480 TCGv t2
= tcg_temp_new();
2481 TCGLabel
*l1
= gen_new_label();
2483 gen_load_gpr(t1
, rs
);
2484 tcg_gen_addi_tl(t0
, t1
, uimm
);
2486 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2487 tcg_gen_xori_tl(t2
, t0
, uimm
);
2488 tcg_gen_and_tl(t1
, t1
, t2
);
2490 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2492 /* operands of same sign, result different sign */
2493 generate_exception(ctx
, EXCP_OVERFLOW
);
2495 gen_store_gpr(t0
, rt
);
2501 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2503 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2510 /* Logic with immediate operand */
2511 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2512 int rt
, int rs
, int16_t imm
)
2517 /* If no destination, treat it as a NOP. */
2520 uimm
= (uint16_t)imm
;
2523 if (likely(rs
!= 0))
2524 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2526 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2530 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2532 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2535 if (likely(rs
!= 0))
2536 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2538 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2541 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2543 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2544 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2546 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2555 /* Set on less than with immediate operand */
2556 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2557 int rt
, int rs
, int16_t imm
)
2559 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2563 /* If no destination, treat it as a NOP. */
2566 t0
= tcg_temp_new();
2567 gen_load_gpr(t0
, rs
);
2570 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2573 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2579 /* Shifts with immediate operand */
2580 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2581 int rt
, int rs
, int16_t imm
)
2583 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2587 /* If no destination, treat it as a NOP. */
2591 t0
= tcg_temp_new();
2592 gen_load_gpr(t0
, rs
);
2595 tcg_gen_shli_tl(t0
, t0
, uimm
);
2596 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2599 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2603 tcg_gen_ext32u_tl(t0
, t0
);
2604 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2606 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2611 TCGv_i32 t1
= tcg_temp_new_i32();
2613 tcg_gen_trunc_tl_i32(t1
, t0
);
2614 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2615 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2616 tcg_temp_free_i32(t1
);
2618 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2621 #if defined(TARGET_MIPS64)
2623 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2626 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2629 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2633 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2635 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2639 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2642 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2645 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2648 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2656 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2657 int rd
, int rs
, int rt
)
2659 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2660 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2661 /* If no destination, treat it as a NOP.
2662 For add & sub, we must generate the overflow exception when needed. */
2669 TCGv t0
= tcg_temp_local_new();
2670 TCGv t1
= tcg_temp_new();
2671 TCGv t2
= tcg_temp_new();
2672 TCGLabel
*l1
= gen_new_label();
2674 gen_load_gpr(t1
, rs
);
2675 gen_load_gpr(t2
, rt
);
2676 tcg_gen_add_tl(t0
, t1
, t2
);
2677 tcg_gen_ext32s_tl(t0
, t0
);
2678 tcg_gen_xor_tl(t1
, t1
, t2
);
2679 tcg_gen_xor_tl(t2
, t0
, t2
);
2680 tcg_gen_andc_tl(t1
, t2
, t1
);
2682 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2684 /* operands of same sign, result different sign */
2685 generate_exception(ctx
, EXCP_OVERFLOW
);
2687 gen_store_gpr(t0
, rd
);
2692 if (rs
!= 0 && rt
!= 0) {
2693 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2694 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2695 } else if (rs
== 0 && rt
!= 0) {
2696 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2697 } else if (rs
!= 0 && rt
== 0) {
2698 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2700 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2705 TCGv t0
= tcg_temp_local_new();
2706 TCGv t1
= tcg_temp_new();
2707 TCGv t2
= tcg_temp_new();
2708 TCGLabel
*l1
= gen_new_label();
2710 gen_load_gpr(t1
, rs
);
2711 gen_load_gpr(t2
, rt
);
2712 tcg_gen_sub_tl(t0
, t1
, t2
);
2713 tcg_gen_ext32s_tl(t0
, t0
);
2714 tcg_gen_xor_tl(t2
, t1
, t2
);
2715 tcg_gen_xor_tl(t1
, t0
, t1
);
2716 tcg_gen_and_tl(t1
, t1
, t2
);
2718 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2720 /* operands of different sign, first operand and result different sign */
2721 generate_exception(ctx
, EXCP_OVERFLOW
);
2723 gen_store_gpr(t0
, rd
);
2728 if (rs
!= 0 && rt
!= 0) {
2729 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2730 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2731 } else if (rs
== 0 && rt
!= 0) {
2732 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2733 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2734 } else if (rs
!= 0 && rt
== 0) {
2735 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2737 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2740 #if defined(TARGET_MIPS64)
2743 TCGv t0
= tcg_temp_local_new();
2744 TCGv t1
= tcg_temp_new();
2745 TCGv t2
= tcg_temp_new();
2746 TCGLabel
*l1
= gen_new_label();
2748 gen_load_gpr(t1
, rs
);
2749 gen_load_gpr(t2
, rt
);
2750 tcg_gen_add_tl(t0
, t1
, t2
);
2751 tcg_gen_xor_tl(t1
, t1
, t2
);
2752 tcg_gen_xor_tl(t2
, t0
, t2
);
2753 tcg_gen_andc_tl(t1
, t2
, t1
);
2755 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2757 /* operands of same sign, result different sign */
2758 generate_exception(ctx
, EXCP_OVERFLOW
);
2760 gen_store_gpr(t0
, rd
);
2765 if (rs
!= 0 && rt
!= 0) {
2766 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2767 } else if (rs
== 0 && rt
!= 0) {
2768 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2769 } else if (rs
!= 0 && rt
== 0) {
2770 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2772 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2777 TCGv t0
= tcg_temp_local_new();
2778 TCGv t1
= tcg_temp_new();
2779 TCGv t2
= tcg_temp_new();
2780 TCGLabel
*l1
= gen_new_label();
2782 gen_load_gpr(t1
, rs
);
2783 gen_load_gpr(t2
, rt
);
2784 tcg_gen_sub_tl(t0
, t1
, t2
);
2785 tcg_gen_xor_tl(t2
, t1
, t2
);
2786 tcg_gen_xor_tl(t1
, t0
, t1
);
2787 tcg_gen_and_tl(t1
, t1
, t2
);
2789 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2791 /* operands of different sign, first operand and result different sign */
2792 generate_exception(ctx
, EXCP_OVERFLOW
);
2794 gen_store_gpr(t0
, rd
);
2799 if (rs
!= 0 && rt
!= 0) {
2800 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2801 } else if (rs
== 0 && rt
!= 0) {
2802 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2803 } else if (rs
!= 0 && rt
== 0) {
2804 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2806 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2811 if (likely(rs
!= 0 && rt
!= 0)) {
2812 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2813 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2815 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2821 /* Conditional move */
2822 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2823 int rd
, int rs
, int rt
)
2828 /* If no destination, treat it as a NOP. */
2832 t0
= tcg_temp_new();
2833 gen_load_gpr(t0
, rt
);
2834 t1
= tcg_const_tl(0);
2835 t2
= tcg_temp_new();
2836 gen_load_gpr(t2
, rs
);
2839 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2842 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2845 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2848 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2857 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2858 int rd
, int rs
, int rt
)
2861 /* If no destination, treat it as a NOP. */
2867 if (likely(rs
!= 0 && rt
!= 0)) {
2868 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2870 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2874 if (rs
!= 0 && rt
!= 0) {
2875 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2876 } else if (rs
== 0 && rt
!= 0) {
2877 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2878 } else if (rs
!= 0 && rt
== 0) {
2879 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2881 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2885 if (likely(rs
!= 0 && rt
!= 0)) {
2886 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2887 } else if (rs
== 0 && rt
!= 0) {
2888 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2889 } else if (rs
!= 0 && rt
== 0) {
2890 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2892 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2896 if (likely(rs
!= 0 && rt
!= 0)) {
2897 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2898 } else if (rs
== 0 && rt
!= 0) {
2899 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2900 } else if (rs
!= 0 && rt
== 0) {
2901 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2903 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2909 /* Set on lower than */
2910 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2911 int rd
, int rs
, int rt
)
2916 /* If no destination, treat it as a NOP. */
2920 t0
= tcg_temp_new();
2921 t1
= tcg_temp_new();
2922 gen_load_gpr(t0
, rs
);
2923 gen_load_gpr(t1
, rt
);
2926 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2929 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2937 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2938 int rd
, int rs
, int rt
)
        /* If no destination, treat it as a NOP. */
2948 t0
= tcg_temp_new();
2949 t1
= tcg_temp_new();
2950 gen_load_gpr(t0
, rs
);
2951 gen_load_gpr(t1
, rt
);
2954 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2955 tcg_gen_shl_tl(t0
, t1
, t0
);
2956 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2959 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2960 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2963 tcg_gen_ext32u_tl(t1
, t1
);
2964 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2965 tcg_gen_shr_tl(t0
, t1
, t0
);
2966 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2970 TCGv_i32 t2
= tcg_temp_new_i32();
2971 TCGv_i32 t3
= tcg_temp_new_i32();
2973 tcg_gen_trunc_tl_i32(t2
, t0
);
2974 tcg_gen_trunc_tl_i32(t3
, t1
);
2975 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2976 tcg_gen_rotr_i32(t2
, t3
, t2
);
2977 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2978 tcg_temp_free_i32(t2
);
2979 tcg_temp_free_i32(t3
);
2982 #if defined(TARGET_MIPS64)
2984 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2985 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2988 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2989 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2992 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2993 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
2996 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2997 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3005 /* Arithmetic on HI/LO registers */
3006 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3008 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3019 #if defined(TARGET_MIPS64)
3021 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3025 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3029 #if defined(TARGET_MIPS64)
3031 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3035 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3040 #if defined(TARGET_MIPS64)
3042 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3046 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3049 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3054 #if defined(TARGET_MIPS64)
3056 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3060 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3063 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3069 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3072 TCGv t0
= tcg_const_tl(addr
);
3073 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3074 gen_store_gpr(t0
, reg
);
3078 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3084 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3087 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3088 addr
= addr_add(ctx
, pc
, offset
);
3089 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3093 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3094 addr
= addr_add(ctx
, pc
, offset
);
3095 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3097 #if defined(TARGET_MIPS64)
3100 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3101 addr
= addr_add(ctx
, pc
, offset
);
3102 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3106 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3109 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3110 addr
= addr_add(ctx
, pc
, offset
);
3111 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3116 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3117 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3118 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3121 #if defined(TARGET_MIPS64)
3122 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3123 case R6_OPC_LDPC
+ (1 << 16):
3124 case R6_OPC_LDPC
+ (2 << 16):
3125 case R6_OPC_LDPC
+ (3 << 16):
3127 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3128 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3129 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3133 MIPS_INVAL("OPC_PCREL");
3134 generate_exception_end(ctx
, EXCP_RI
);
3141 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3150 t0
= tcg_temp_new();
3151 t1
= tcg_temp_new();
3153 gen_load_gpr(t0
, rs
);
3154 gen_load_gpr(t1
, rt
);
3159 TCGv t2
= tcg_temp_new();
3160 TCGv t3
= tcg_temp_new();
3161 tcg_gen_ext32s_tl(t0
, t0
);
3162 tcg_gen_ext32s_tl(t1
, t1
);
3163 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3164 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3165 tcg_gen_and_tl(t2
, t2
, t3
);
3166 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3167 tcg_gen_or_tl(t2
, t2
, t3
);
3168 tcg_gen_movi_tl(t3
, 0);
3169 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3170 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3171 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
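            /* The setcond/movcond sequence forces the divisor to 1 whenever
               it is 0 or the division would overflow (INT_MIN / -1), so the
               host division cannot trap; MIPS leaves the result of such
               divisions UNPREDICTABLE, so any value is acceptable. */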
3178 TCGv t2
= tcg_temp_new();
3179 TCGv t3
= tcg_temp_new();
3180 tcg_gen_ext32s_tl(t0
, t0
);
3181 tcg_gen_ext32s_tl(t1
, t1
);
3182 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3183 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3184 tcg_gen_and_tl(t2
, t2
, t3
);
3185 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3186 tcg_gen_or_tl(t2
, t2
, t3
);
3187 tcg_gen_movi_tl(t3
, 0);
3188 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3189 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3190 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3197 TCGv t2
= tcg_const_tl(0);
3198 TCGv t3
= tcg_const_tl(1);
3199 tcg_gen_ext32u_tl(t0
, t0
);
3200 tcg_gen_ext32u_tl(t1
, t1
);
3201 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3202 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3203 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3210 TCGv t2
= tcg_const_tl(0);
3211 TCGv t3
= tcg_const_tl(1);
3212 tcg_gen_ext32u_tl(t0
, t0
);
3213 tcg_gen_ext32u_tl(t1
, t1
);
3214 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3215 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3216 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3223 TCGv_i32 t2
= tcg_temp_new_i32();
3224 TCGv_i32 t3
= tcg_temp_new_i32();
3225 tcg_gen_trunc_tl_i32(t2
, t0
);
3226 tcg_gen_trunc_tl_i32(t3
, t1
);
3227 tcg_gen_mul_i32(t2
, t2
, t3
);
3228 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3229 tcg_temp_free_i32(t2
);
3230 tcg_temp_free_i32(t3
);
3235 TCGv_i32 t2
= tcg_temp_new_i32();
3236 TCGv_i32 t3
= tcg_temp_new_i32();
3237 tcg_gen_trunc_tl_i32(t2
, t0
);
3238 tcg_gen_trunc_tl_i32(t3
, t1
);
3239 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3240 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3241 tcg_temp_free_i32(t2
);
3242 tcg_temp_free_i32(t3
);
3247 TCGv_i32 t2
= tcg_temp_new_i32();
3248 TCGv_i32 t3
= tcg_temp_new_i32();
3249 tcg_gen_trunc_tl_i32(t2
, t0
);
3250 tcg_gen_trunc_tl_i32(t3
, t1
);
3251 tcg_gen_mul_i32(t2
, t2
, t3
);
3252 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3253 tcg_temp_free_i32(t2
);
3254 tcg_temp_free_i32(t3
);
3259 TCGv_i32 t2
= tcg_temp_new_i32();
3260 TCGv_i32 t3
= tcg_temp_new_i32();
3261 tcg_gen_trunc_tl_i32(t2
, t0
);
3262 tcg_gen_trunc_tl_i32(t3
, t1
);
3263 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3264 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3265 tcg_temp_free_i32(t2
);
3266 tcg_temp_free_i32(t3
);
3269 #if defined(TARGET_MIPS64)
3272 TCGv t2
= tcg_temp_new();
3273 TCGv t3
= tcg_temp_new();
3274 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3275 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3276 tcg_gen_and_tl(t2
, t2
, t3
);
3277 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3278 tcg_gen_or_tl(t2
, t2
, t3
);
3279 tcg_gen_movi_tl(t3
, 0);
3280 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3281 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3288 TCGv t2
= tcg_temp_new();
3289 TCGv t3
= tcg_temp_new();
3290 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3291 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3292 tcg_gen_and_tl(t2
, t2
, t3
);
3293 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3294 tcg_gen_or_tl(t2
, t2
, t3
);
3295 tcg_gen_movi_tl(t3
, 0);
3296 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3297 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3304 TCGv t2
= tcg_const_tl(0);
3305 TCGv t3
= tcg_const_tl(1);
3306 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3307 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3314 TCGv t2
= tcg_const_tl(0);
3315 TCGv t3
= tcg_const_tl(1);
3316 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3317 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3323 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3327 TCGv t2
= tcg_temp_new();
3328 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3333 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3337 TCGv t2
= tcg_temp_new();
3338 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3344 MIPS_INVAL("r6 mul/div");
3345 generate_exception_end(ctx
, EXCP_RI
);
3353 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3354 int acc
, int rs
, int rt
)
3358 t0
= tcg_temp_new();
3359 t1
= tcg_temp_new();
3361 gen_load_gpr(t0
, rs
);
3362 gen_load_gpr(t1
, rt
);
3371 TCGv t2
= tcg_temp_new();
3372 TCGv t3
= tcg_temp_new();
3373 tcg_gen_ext32s_tl(t0
, t0
);
3374 tcg_gen_ext32s_tl(t1
, t1
);
3375 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3376 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3377 tcg_gen_and_tl(t2
, t2
, t3
);
3378 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3379 tcg_gen_or_tl(t2
, t2
, t3
);
3380 tcg_gen_movi_tl(t3
, 0);
3381 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3382 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3383 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3384 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3385 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3392 TCGv t2
= tcg_const_tl(0);
3393 TCGv t3
= tcg_const_tl(1);
3394 tcg_gen_ext32u_tl(t0
, t0
);
3395 tcg_gen_ext32u_tl(t1
, t1
);
3396 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3397 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3398 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3399 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3400 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3407 TCGv_i32 t2
= tcg_temp_new_i32();
3408 TCGv_i32 t3
= tcg_temp_new_i32();
3409 tcg_gen_trunc_tl_i32(t2
, t0
);
3410 tcg_gen_trunc_tl_i32(t3
, t1
);
3411 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3412 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3413 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3414 tcg_temp_free_i32(t2
);
3415 tcg_temp_free_i32(t3
);
3420 TCGv_i32 t2
= tcg_temp_new_i32();
3421 TCGv_i32 t3
= tcg_temp_new_i32();
3422 tcg_gen_trunc_tl_i32(t2
, t0
);
3423 tcg_gen_trunc_tl_i32(t3
, t1
);
3424 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3425 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3426 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3427 tcg_temp_free_i32(t2
);
3428 tcg_temp_free_i32(t3
);
3431 #if defined(TARGET_MIPS64)
3434 TCGv t2
= tcg_temp_new();
3435 TCGv t3
= tcg_temp_new();
3436 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3437 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3438 tcg_gen_and_tl(t2
, t2
, t3
);
3439 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3440 tcg_gen_or_tl(t2
, t2
, t3
);
3441 tcg_gen_movi_tl(t3
, 0);
3442 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3443 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3444 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3451 TCGv t2
= tcg_const_tl(0);
3452 TCGv t3
= tcg_const_tl(1);
3453 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3454 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3455 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3461 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3464 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3469 TCGv_i64 t2
= tcg_temp_new_i64();
3470 TCGv_i64 t3
= tcg_temp_new_i64();
3472 tcg_gen_ext_tl_i64(t2
, t0
);
3473 tcg_gen_ext_tl_i64(t3
, t1
);
3474 tcg_gen_mul_i64(t2
, t2
, t3
);
3475 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3476 tcg_gen_add_i64(t2
, t2
, t3
);
3477 tcg_temp_free_i64(t3
);
3478 gen_move_low32(cpu_LO
[acc
], t2
);
3479 gen_move_high32(cpu_HI
[acc
], t2
);
3480 tcg_temp_free_i64(t2
);
3485 TCGv_i64 t2
= tcg_temp_new_i64();
3486 TCGv_i64 t3
= tcg_temp_new_i64();
3488 tcg_gen_ext32u_tl(t0
, t0
);
3489 tcg_gen_ext32u_tl(t1
, t1
);
3490 tcg_gen_extu_tl_i64(t2
, t0
);
3491 tcg_gen_extu_tl_i64(t3
, t1
);
3492 tcg_gen_mul_i64(t2
, t2
, t3
);
3493 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3494 tcg_gen_add_i64(t2
, t2
, t3
);
3495 tcg_temp_free_i64(t3
);
3496 gen_move_low32(cpu_LO
[acc
], t2
);
3497 gen_move_high32(cpu_HI
[acc
], t2
);
3498 tcg_temp_free_i64(t2
);
3503 TCGv_i64 t2
= tcg_temp_new_i64();
3504 TCGv_i64 t3
= tcg_temp_new_i64();
3506 tcg_gen_ext_tl_i64(t2
, t0
);
3507 tcg_gen_ext_tl_i64(t3
, t1
);
3508 tcg_gen_mul_i64(t2
, t2
, t3
);
3509 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3510 tcg_gen_sub_i64(t2
, t3
, t2
);
3511 tcg_temp_free_i64(t3
);
3512 gen_move_low32(cpu_LO
[acc
], t2
);
3513 gen_move_high32(cpu_HI
[acc
], t2
);
3514 tcg_temp_free_i64(t2
);
3519 TCGv_i64 t2
= tcg_temp_new_i64();
3520 TCGv_i64 t3
= tcg_temp_new_i64();
3522 tcg_gen_ext32u_tl(t0
, t0
);
3523 tcg_gen_ext32u_tl(t1
, t1
);
3524 tcg_gen_extu_tl_i64(t2
, t0
);
3525 tcg_gen_extu_tl_i64(t3
, t1
);
3526 tcg_gen_mul_i64(t2
, t2
, t3
);
3527 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3528 tcg_gen_sub_i64(t2
, t3
, t2
);
3529 tcg_temp_free_i64(t3
);
3530 gen_move_low32(cpu_LO
[acc
], t2
);
3531 gen_move_high32(cpu_HI
[acc
], t2
);
3532 tcg_temp_free_i64(t2
);
3536 MIPS_INVAL("mul/div");
3537 generate_exception_end(ctx
, EXCP_RI
);
3545 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3546 int rd
, int rs
, int rt
)
3548 TCGv t0
= tcg_temp_new();
3549 TCGv t1
= tcg_temp_new();
3551 gen_load_gpr(t0
, rs
);
3552 gen_load_gpr(t1
, rt
);
3555 case OPC_VR54XX_MULS
:
3556 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3558 case OPC_VR54XX_MULSU
:
3559 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3561 case OPC_VR54XX_MACC
:
3562 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3564 case OPC_VR54XX_MACCU
:
3565 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3567 case OPC_VR54XX_MSAC
:
3568 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3570 case OPC_VR54XX_MSACU
:
3571 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3573 case OPC_VR54XX_MULHI
:
3574 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3576 case OPC_VR54XX_MULHIU
:
3577 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3579 case OPC_VR54XX_MULSHI
:
3580 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3582 case OPC_VR54XX_MULSHIU
:
3583 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3585 case OPC_VR54XX_MACCHI
:
3586 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3588 case OPC_VR54XX_MACCHIU
:
3589 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3591 case OPC_VR54XX_MSACHI
:
3592 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3594 case OPC_VR54XX_MSACHIU
:
3595 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3598 MIPS_INVAL("mul vr54xx");
3599 generate_exception_end(ctx
, EXCP_RI
);
3602 gen_store_gpr(t0
, rd
);
3609 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3618 t0
= tcg_temp_new();
3619 gen_load_gpr(t0
, rs
);
3623 gen_helper_clo(cpu_gpr
[rd
], t0
);
3627 gen_helper_clz(cpu_gpr
[rd
], t0
);
3629 #if defined(TARGET_MIPS64)
3632 gen_helper_dclo(cpu_gpr
[rd
], t0
);
3636 gen_helper_dclz(cpu_gpr
[rd
], t0
);
3643 /* Godson integer instructions */
3644 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3645 int rd
, int rs
, int rt
)
3657 case OPC_MULTU_G_2E
:
3658 case OPC_MULTU_G_2F
:
3659 #if defined(TARGET_MIPS64)
3660 case OPC_DMULT_G_2E
:
3661 case OPC_DMULT_G_2F
:
3662 case OPC_DMULTU_G_2E
:
3663 case OPC_DMULTU_G_2F
:
3665 t0
= tcg_temp_new();
3666 t1
= tcg_temp_new();
3669 t0
= tcg_temp_local_new();
3670 t1
= tcg_temp_local_new();
3674 gen_load_gpr(t0
, rs
);
3675 gen_load_gpr(t1
, rt
);
3680 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3681 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3683 case OPC_MULTU_G_2E
:
3684 case OPC_MULTU_G_2F
:
3685 tcg_gen_ext32u_tl(t0
, t0
);
3686 tcg_gen_ext32u_tl(t1
, t1
);
3687 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3688 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3693 TCGLabel
*l1
= gen_new_label();
3694 TCGLabel
*l2
= gen_new_label();
3695 TCGLabel
*l3
= gen_new_label();
3696 tcg_gen_ext32s_tl(t0
, t0
);
3697 tcg_gen_ext32s_tl(t1
, t1
);
3698 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3699 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3702 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3703 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3704 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3707 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3708 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3715 TCGLabel
*l1
= gen_new_label();
3716 TCGLabel
*l2
= gen_new_label();
3717 tcg_gen_ext32u_tl(t0
, t0
);
3718 tcg_gen_ext32u_tl(t1
, t1
);
3719 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3720 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3723 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3724 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3731 TCGLabel
*l1
= gen_new_label();
3732 TCGLabel
*l2
= gen_new_label();
3733 TCGLabel
*l3
= gen_new_label();
3734 tcg_gen_ext32u_tl(t0
, t0
);
3735 tcg_gen_ext32u_tl(t1
, t1
);
3736 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3737 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3738 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3740 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3743 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3744 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3751 TCGLabel
*l1
= gen_new_label();
3752 TCGLabel
*l2
= gen_new_label();
3753 tcg_gen_ext32u_tl(t0
, t0
);
3754 tcg_gen_ext32u_tl(t1
, t1
);
3755 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3756 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3759 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3760 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3764 #if defined(TARGET_MIPS64)
3765 case OPC_DMULT_G_2E
:
3766 case OPC_DMULT_G_2F
:
3767 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3769 case OPC_DMULTU_G_2E
:
3770 case OPC_DMULTU_G_2F
:
3771 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3776 TCGLabel
*l1
= gen_new_label();
3777 TCGLabel
*l2
= gen_new_label();
3778 TCGLabel
*l3
= gen_new_label();
3779 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3780 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3783 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3784 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3785 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3788 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3792 case OPC_DDIVU_G_2E
:
3793 case OPC_DDIVU_G_2F
:
3795 TCGLabel
*l1
= gen_new_label();
3796 TCGLabel
*l2
= gen_new_label();
3797 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3798 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3801 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3808 TCGLabel
*l1
= gen_new_label();
3809 TCGLabel
*l2
= gen_new_label();
3810 TCGLabel
*l3
= gen_new_label();
3811 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3812 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3813 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3815 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3818 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3822 case OPC_DMODU_G_2E
:
3823 case OPC_DMODU_G_2F
:
3825 TCGLabel
*l1
= gen_new_label();
3826 TCGLabel
*l2
= gen_new_label();
3827 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3828 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3831 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3842 /* Loongson multimedia instructions */
3843 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3845 uint32_t opc
, shift_max
;
3848 opc
= MASK_LMI(ctx
->opcode
);
3854 t0
= tcg_temp_local_new_i64();
3855 t1
= tcg_temp_local_new_i64();
3858 t0
= tcg_temp_new_i64();
3859 t1
= tcg_temp_new_i64();
3863 gen_load_fpr64(ctx
, t0
, rs
);
3864 gen_load_fpr64(ctx
, t1
, rt
);
3866 #define LMI_HELPER(UP, LO) \
3867 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3868 #define LMI_HELPER_1(UP, LO) \
3869 case OPC_##UP: gen_helper_##LO(t0, t0); break
3870 #define LMI_DIRECT(UP, LO, OP) \
3871 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);

    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);

    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);

    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);

    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);

    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);

    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);

    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);

    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);

    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(PANDN, pandn, andc);
    LMI_DIRECT(OR_CP2, or, or);
        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);

        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);

        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);

        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);
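        /* Halfword extract: the low two bits of t1 select which 16-bit field
           of t0 is shifted down and zero-extended into the result. */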
        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);

        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);

        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        /* Make sure shift count isn't TCG undefined behaviour.  */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);

            tcg_gen_shl_i64(t0, t0, t1);

            /* Since SRA is UndefinedResult without sign-extended inputs,
               we can treat SRA and DSRA the same.  */
            tcg_gen_sar_i64(t0, t0, t1);

            /* We want to shift in zeros for SRL; zero-extend first.  */
            tcg_gen_ext32u_i64(t0, t0);

            tcg_gen_shr_i64(t0, t0, t1);

        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);
        }

        /* Shifts larger than MAX produce zero.  */
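        /* The setcond/neg/and sequence turns the unsigned comparison into an
           all-ones or all-zero mask that is ANDed into the result. */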
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_add_i64(t0, t1, t2);
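            /* Signed overflow check: the XORs below set the sign bit of t1
               exactly when both addends share a sign that differs from the
               sum's, in which case an Integer Overflow exception is raised. */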
            if (opc == OPC_ADD_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_andc_i64(t1, t2, t1);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_sub_i64(t0, t1, t2);
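            /* As for the add above, a sign-bit test on the XOR combination
               below flags signed overflow of the subtraction and raises
               EXCP_OVERFLOW. */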
            if (opc == OPC_SUB_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_and_i64(t1, t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);
        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field?  */

        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr64(ctx, t0, rd);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
{

    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    /* Load needed operands */

        /* Compare two registers */

        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);

        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
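    /* When the two operands are statically identical (or r0 is compared with
       a zero immediate) the trap condition is known at translate time, so the
       cases below either trap unconditionally or degenerate into NOPs. */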
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */

            generate_exception_end(ctx, EXCP_TRAP);

        case OPC_TLT:   /* rs < rs          */
        case OPC_TLTI:  /* r0 < 0           */
        case OPC_TLTU:  /* rs < rs unsigned */
        case OPC_TLTIU: /* r0 < 0  unsigned */
        case OPC_TNE:   /* rs != rs         */
        case OPC_TNEI:  /* r0 != 0          */
            /* Never trap: treat as NOP. */
        TCGLabel *l1 = gen_new_label();
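        /* Each trap condition is tested inverted: the branch skips past the
           trap when the condition does not hold, so execution only reaches
           generate_exception(EXCP_TRAP) when it does. */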
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);

            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);

            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);

            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);

            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);

            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);

        generate_exception(ctx, EXCP_TRAP);
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    TranslationBlock *tb;
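    /* Direct block chaining is only used when the destination stays on the
       same guest page as this TB and single-stepping is disabled; under
       single-step a debug exception is raised instead. */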
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!ctx->singlestep_enabled)) {

        tcg_gen_exit_tb((uintptr_t)tb + n);

        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int rs, int rt, int32_t offset,

    target_ulong btgt = -1;

    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->pc);

        generate_exception_end(ctx, EXCP_RI);
    /* Load needed operands */

        /* Compare two registers */

        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);

        btgt = ctx->pc + insn_bytes + offset;

        /* Compare to zero */

        gen_load_gpr(t0, rs);

        btgt = ctx->pc + insn_bytes + offset;

#if defined(TARGET_MIPS64)

        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);

        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);

        btgt = ctx->pc + insn_bytes + offset;

        /* Jump to immediate */
        btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;
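        /* The jump target keeps the top four bits of the address of the
           instruction following the jump and replaces the rest with the
           (already shifted) 26-bit instruction index. */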
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);

        gen_load_gpr(btarget, rs);

        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* No condition to be computed */
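        /* With identical (or zero) operands the outcome is known statically:
           always-taken branches just set MIPS_HFLAG_B (linking if needed),
           never-taken ones fall through or, for the "likely" variants, skip
           the delay slot instruction. */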
        case OPC_BEQ:   /* rx == rx        */
        case OPC_BEQL:  /* rx == rx likely */
        case OPC_BGEZ:  /* 0 >= 0          */
        case OPC_BGEZL: /* 0 >= 0 likely   */
        case OPC_BLEZ:  /* 0 <= 0          */
        case OPC_BLEZL: /* 0 <= 0 likely   */

            ctx->hflags |= MIPS_HFLAG_B;

        case OPC_BGEZAL:  /* 0 >= 0        */
        case OPC_BGEZALL: /* 0 >= 0 likely */
            /* Always take and link */

            ctx->hflags |= MIPS_HFLAG_B;

        case OPC_BNE:  /* rx != rx */
        case OPC_BGTZ: /* 0 > 0    */
        case OPC_BLTZ: /* 0 < 0    */

        case OPC_BLTZAL: /* 0 < 0 */
            /* Handle as an unconditional branch to get correct delay
               slot checking. */

            btgt = ctx->pc + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;

        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */

        case OPC_BNEL:  /* rx != rx likely */
        case OPC_BGTZL: /* 0 > 0 likely    */
        case OPC_BLTZL: /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */

            ctx->hflags |= MIPS_HFLAG_B;

            ctx->hflags |= MIPS_HFLAG_BX;

            ctx->hflags |= MIPS_HFLAG_B;

            ctx->hflags |= MIPS_HFLAG_BR;

            ctx->hflags |= MIPS_HFLAG_BR;

        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);
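    /* For the remaining branches the condition is evaluated at run time into
       the global 'bcond'; MIPS_HFLAG_BC marks an ordinary conditional branch
       and MIPS_HFLAG_BL a branch-likely, whose delay slot is annulled when
       the branch is not taken. */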
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);

            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);

            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);

            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);

            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);

            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);

            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);

            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);

            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);

            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);

            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);

            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);

            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);

            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);

            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);

#if defined(TARGET_MIPS64)

            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);

            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);

            ctx->hflags |= MIPS_HFLAG_BC;

            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);

            ctx->hflags |= MIPS_HFLAG_BL;

        MIPS_INVAL("conditional branch/jump");
        generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btgt;

    switch (delayslot_size) {

        ctx->hflags |= MIPS_HFLAG_BDS16;

        ctx->hflags |= MIPS_HFLAG_BDS32;

        int post_delay = insn_bytes + delayslot_size;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);

        tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);
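        /* The link register receives the address of the instruction following
           the delay slot; for MIPS16/microMIPS code the low bit is set so a
           return stays in the compressed instruction set. */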
    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);

        if (lsb + msb > 31) {

        tcg_gen_shri_tl(t0, t1, lsb);

        tcg_gen_andi_tl(t0, t0, (1U << (msb + 1)) - 1);

        tcg_gen_ext32s_tl(t0, t0);

#if defined(TARGET_MIPS64)

        if (lsb + msb > 63) {

        tcg_gen_shri_tl(t0, t1, lsb);

        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);

        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);

#if defined(TARGET_MIPS64)

        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);

        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_gpr(t0, rt);
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
{

    /* If no destination, treat it as a NOP. */

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
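    /* Byte/halfword shuffles: the 32-bit variant below swaps the two bytes of
       each halfword by shifting the odd bytes down and the even bytes up,
       masking with 0x00FF00FF and OR-ing the halves back together. */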
            TCGv t1 = tcg_temp_new();

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
            tcg_gen_or_tl(t0, t0, t1);

            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);

        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);

        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);

#if defined(TARGET_MIPS64)

            TCGv t1 = tcg_temp_new();

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);

            TCGv t1 = tcg_temp_new();

            tcg_gen_shri_tl(t1, t0, 16);
            tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
            tcg_gen_shli_tl(t0, t0, 16);
            tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_shri_tl(t1, t0, 32);
            tcg_gen_shli_tl(t0, t0, 32);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);

        MIPS_INVAL("bsfhl");
        generate_exception_end(ctx, EXCP_RI);
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    }
static void gen_align(DisasContext *ctx, int opc, int rd, int rs, int rt,

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);

    tcg_gen_mov_tl(cpu_gpr[rd], t0);

        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t1, rs);
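        /* ALIGN concatenates rt:rs into a 64-bit value (rt in the upper half)
           and shifts it right by 8 * (4 - bp), leaving
           (rt << 8*bp) | (rs >> 8*(4-bp)) in the low 32 bits, which
           gen_move_low32 then writes to rd. */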
            TCGv_i64 t2 = tcg_temp_new_i64();
            tcg_gen_concat_tl_i64(t2, t1, t0);
            tcg_gen_shri_i64(t2, t2, 8 * (4 - bp));
            gen_move_low32(cpu_gpr[rd], t2);
            tcg_temp_free_i64(t2);

#if defined(TARGET_MIPS64)

            tcg_gen_shli_tl(t0, t0, 8 * bp);
            tcg_gen_shri_tl(t1, t1, 8 * (8 - bp));
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
{

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);

        gen_helper_bitswap(cpu_gpr[rd], t0);

#if defined(TARGET_MIPS64)

        gen_helper_dbitswap(cpu_gpr[rd], t0);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
#else
    tcg_gen_concat32_i64(t1, t1, t0);
#endif
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}
static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}
static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
#else
    tcg_gen_shri_i64(t0, t0, 32);
#endif
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}
static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}
static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}
static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}
static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}
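/* MFHC0 and MTHC0 move the upper 32 bits of selected 64-bit CP0 registers.
   Without MIPS_HFLAG_ELPA set, MFHC0 simply reads as zero via the
   mfhc0_read_zero path below. */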
static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *rn = "invalid";

    if (!(ctx->hflags & MIPS_HFLAG_ELPA)) {
        goto mfhc0_read_zero;
    }

            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));

            goto mfhc0_read_zero;

            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));

            goto mfhc0_read_zero;

            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                             ctx->CP0_LLAddr_shift);

            goto mfhc0_read_zero;

            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);

            goto mfhc0_read_zero;

        goto mfhc0_read_zero;

    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    return;

mfhc0_read_zero:
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    tcg_gen_movi_tl(arg, 0);
}
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

    if (!(ctx->hflags & MIPS_HFLAG_ELPA)) {

            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));

            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));

            /* LLAddr is read-only (the only exception is bit 0 if LLB is
               supported); the CP0_LLAddr_rw_bitmask does not seem to be
               relevant for modern MIPS cores supporting MTHC0, therefore
               treating MTHC0 to LLAddr as NOP. */

            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));

    (void)rn; /* avoid a compiler warning */

    LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn, reg, sel);
}
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
{
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
    } else {
        tcg_gen_movi_tl(arg, ~0);
    }
}

#define CP0_CHECK(c) \

        goto cp0_unimplemented; \
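/* CP0_CHECK guards a register access on a condition (typically an ASE or ISA
   level); when the condition is false it bails out to the cp0_unimplemented
   path, which logs the access and falls back to the unimplemented-register
   handling. */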
static void gen_mfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *rn = "invalid";

        check_insn(ctx, ISA_MIPS32);
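    /* The (reg, sel) pair selects the CP0 register to read.  Combinations
       that are unknown or not enabled for this CPU fall through to the
       cp0_unimplemented label, which logs the access and returns the default
       value via gen_mfc0_unimplemented(). */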
4923 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4927 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4928 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4932 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4933 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4937 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4938 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4942 goto cp0_unimplemented
;
4948 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
4949 gen_helper_mfc0_random(arg
, cpu_env
);
4953 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4954 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4958 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4959 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4963 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4964 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4968 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4969 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
4973 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4974 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4978 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4979 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4980 rn
= "VPEScheFBack";
4983 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4984 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
4988 goto cp0_unimplemented
;
4995 TCGv_i64 tmp
= tcg_temp_new_i64();
4996 tcg_gen_ld_i64(tmp
, cpu_env
,
4997 offsetof(CPUMIPSState
, CP0_EntryLo0
));
4998 #if defined(TARGET_MIPS64)
5000 /* Move RI/XI fields to bits 31:30 */
5001 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5002 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5005 gen_move_low32(arg
, tmp
);
5006 tcg_temp_free_i64(tmp
);
5011 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5012 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5016 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5017 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5021 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5022 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5026 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5027 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5031 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5032 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5036 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5037 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5041 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5042 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5046 goto cp0_unimplemented
;
5053 TCGv_i64 tmp
= tcg_temp_new_i64();
5054 tcg_gen_ld_i64(tmp
, cpu_env
,
5055 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5056 #if defined(TARGET_MIPS64)
5058 /* Move RI/XI fields to bits 31:30 */
5059 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5060 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5063 gen_move_low32(arg
, tmp
);
5064 tcg_temp_free_i64(tmp
);
5069 goto cp0_unimplemented
;
5075 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5076 tcg_gen_ext32s_tl(arg
, arg
);
5080 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5081 rn
= "ContextConfig";
5082 goto cp0_unimplemented
;
5085 CP0_CHECK(ctx
->ulri
);
5086 tcg_gen_ld32s_tl(arg
, cpu_env
,
5087 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5091 goto cp0_unimplemented
;
5097 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5101 check_insn(ctx
, ISA_MIPS32R2
);
5102 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5106 goto cp0_unimplemented
;
5112 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5116 check_insn(ctx
, ISA_MIPS32R2
);
5117 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5121 check_insn(ctx
, ISA_MIPS32R2
);
5122 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5126 check_insn(ctx
, ISA_MIPS32R2
);
5127 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5131 check_insn(ctx
, ISA_MIPS32R2
);
5132 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5136 check_insn(ctx
, ISA_MIPS32R2
);
5137 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5141 goto cp0_unimplemented
;
5147 check_insn(ctx
, ISA_MIPS32R2
);
5148 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5152 goto cp0_unimplemented
;
5158 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5159 tcg_gen_ext32s_tl(arg
, arg
);
5164 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5169 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5173 goto cp0_unimplemented
;
5179 /* Mark as an IO operation because we read the time. */
5180 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5183 gen_helper_mfc0_count(arg
, cpu_env
);
5184 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5187 /* Break the TB to be able to take timer interrupts immediately
5188 after reading count. */
5189 ctx
->bstate
= BS_STOP
;
5192 /* 6,7 are implementation dependent */
5194 goto cp0_unimplemented
;
5200 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5201 tcg_gen_ext32s_tl(arg
, arg
);
5205 goto cp0_unimplemented
;
5211 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5214 /* 6,7 are implementation dependent */
5216 goto cp0_unimplemented
;
5222 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5226 check_insn(ctx
, ISA_MIPS32R2
);
5227 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5231 check_insn(ctx
, ISA_MIPS32R2
);
5232 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5236 check_insn(ctx
, ISA_MIPS32R2
);
5237 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5241 goto cp0_unimplemented
;
5247 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5251 goto cp0_unimplemented
;
5257 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5258 tcg_gen_ext32s_tl(arg
, arg
);
5262 goto cp0_unimplemented
;
5268 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5272 check_insn(ctx
, ISA_MIPS32R2
);
5273 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5277 goto cp0_unimplemented
;
5283 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5287 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5291 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5295 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5299 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5303 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5306 /* 6,7 are implementation dependent */
5308 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5312 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5316 goto cp0_unimplemented
;
5322 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5326 goto cp0_unimplemented
;
5332 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5336 goto cp0_unimplemented
;
5342 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5346 goto cp0_unimplemented
;
5352 #if defined(TARGET_MIPS64)
5353 check_insn(ctx
, ISA_MIPS3
);
5354 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5355 tcg_gen_ext32s_tl(arg
, arg
);
5360 goto cp0_unimplemented
;
5364 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5365 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5368 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5372 goto cp0_unimplemented
;
5376 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5377 rn
= "'Diagnostic"; /* implementation dependent */
5382 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5386 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5387 rn
= "TraceControl";
5390 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5391 rn
= "TraceControl2";
5394 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5395 rn
= "UserTraceData";
5398 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5402 goto cp0_unimplemented
;
5409 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5410 tcg_gen_ext32s_tl(arg
, arg
);
5414 goto cp0_unimplemented
;
5420 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5421 rn
= "Performance0";
5424 // gen_helper_mfc0_performance1(arg);
5425 rn
= "Performance1";
5428 // gen_helper_mfc0_performance2(arg);
5429 rn
= "Performance2";
5432 // gen_helper_mfc0_performance3(arg);
5433 rn
= "Performance3";
5436 // gen_helper_mfc0_performance4(arg);
5437 rn
= "Performance4";
5440 // gen_helper_mfc0_performance5(arg);
5441 rn
= "Performance5";
5444 // gen_helper_mfc0_performance6(arg);
5445 rn
= "Performance6";
5448 // gen_helper_mfc0_performance7(arg);
5449 rn
= "Performance7";
5452 goto cp0_unimplemented
;
5456 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5462 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5466 goto cp0_unimplemented
;
5476 TCGv_i64 tmp
= tcg_temp_new_i64();
5477 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5478 gen_move_low32(arg
, tmp
);
5479 tcg_temp_free_i64(tmp
);
5487 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5491 goto cp0_unimplemented
;
5500 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5507 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5511 goto cp0_unimplemented
;
5517 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5518 tcg_gen_ext32s_tl(arg
, arg
);
5522 goto cp0_unimplemented
;
5529 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5533 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5534 tcg_gen_ld_tl(arg
, cpu_env
,
5535 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5536 tcg_gen_ext32s_tl(arg
, arg
);
5540 goto cp0_unimplemented
;
5544 goto cp0_unimplemented
;
5546 (void)rn
; /* avoid a compiler warning */
5547 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5551 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5552 gen_mfc0_unimplemented(ctx
, arg
);
5555 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5557 const char *rn
= "invalid";
5560 check_insn(ctx
, ISA_MIPS32
);
5562 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5570 gen_helper_mtc0_index(cpu_env
, arg
);
5574 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5575 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5579 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5584 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5589 goto cp0_unimplemented
;
5599 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5600 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5604 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5605 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5609 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5610 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5614 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5615 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5619 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5620 tcg_gen_st_tl(arg
, cpu_env
,
5621 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5625 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5626 tcg_gen_st_tl(arg
, cpu_env
,
5627 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5628 rn
= "VPEScheFBack";
5631 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5632 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5636 goto cp0_unimplemented
;
5642 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5646 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5647 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5651 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5652 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5656 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5657 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5661 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5662 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5666 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5667 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5671 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5672 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5676 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5677 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5681 goto cp0_unimplemented
;
5687 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5691 goto cp0_unimplemented
;
5697 gen_helper_mtc0_context(cpu_env
, arg
);
5701 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5702 rn
= "ContextConfig";
5703 goto cp0_unimplemented
;
5706 CP0_CHECK(ctx
->ulri
);
5707 tcg_gen_st_tl(arg
, cpu_env
,
5708 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5712 goto cp0_unimplemented
;
5718 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5722 check_insn(ctx
, ISA_MIPS32R2
);
5723 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5725 ctx
->bstate
= BS_STOP
;
5728 goto cp0_unimplemented
;
5734 gen_helper_mtc0_wired(cpu_env
, arg
);
5738 check_insn(ctx
, ISA_MIPS32R2
);
5739 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5743 check_insn(ctx
, ISA_MIPS32R2
);
5744 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5748 check_insn(ctx
, ISA_MIPS32R2
);
5749 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5753 check_insn(ctx
, ISA_MIPS32R2
);
5754 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5758 check_insn(ctx
, ISA_MIPS32R2
);
5759 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5763 goto cp0_unimplemented
;
5769 check_insn(ctx
, ISA_MIPS32R2
);
5770 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5771 ctx
->bstate
= BS_STOP
;
5775 goto cp0_unimplemented
;
5793 goto cp0_unimplemented
;
5799 gen_helper_mtc0_count(cpu_env
, arg
);
5802 /* 6,7 are implementation dependent */
5804 goto cp0_unimplemented
;
5810 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5814 goto cp0_unimplemented
;
5820 gen_helper_mtc0_compare(cpu_env
, arg
);
5823 /* 6,7 are implementation dependent */
5825 goto cp0_unimplemented
;
5831 save_cpu_state(ctx
, 1);
5832 gen_helper_mtc0_status(cpu_env
, arg
);
5833 /* BS_STOP isn't good enough here, hflags may have changed. */
5834 gen_save_pc(ctx
->pc
+ 4);
5835 ctx
->bstate
= BS_EXCP
;
5839 check_insn(ctx
, ISA_MIPS32R2
);
5840 gen_helper_mtc0_intctl(cpu_env
, arg
);
5841 /* Stop translation as we may have switched the execution mode */
5842 ctx
->bstate
= BS_STOP
;
5846 check_insn(ctx
, ISA_MIPS32R2
);
5847 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5848 /* Stop translation as we may have switched the execution mode */
5849 ctx
->bstate
= BS_STOP
;
5853 check_insn(ctx
, ISA_MIPS32R2
);
5854 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5855 /* Stop translation as we may have switched the execution mode */
5856 ctx
->bstate
= BS_STOP
;
5860 goto cp0_unimplemented
;
5866 save_cpu_state(ctx
, 1);
5867 gen_helper_mtc0_cause(cpu_env
, arg
);
5871 goto cp0_unimplemented
;
5877 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5881 goto cp0_unimplemented
;
5891 check_insn(ctx
, ISA_MIPS32R2
);
5892 gen_helper_mtc0_ebase(cpu_env
, arg
);
5896 goto cp0_unimplemented
;
5902 gen_helper_mtc0_config0(cpu_env
, arg
);
5904 /* Stop translation as we may have switched the execution mode */
5905 ctx
->bstate
= BS_STOP
;
5908 /* ignored, read only */
5912 gen_helper_mtc0_config2(cpu_env
, arg
);
5914 /* Stop translation as we may have switched the execution mode */
5915 ctx
->bstate
= BS_STOP
;
5918 gen_helper_mtc0_config3(cpu_env
, arg
);
5920 /* Stop translation as we may have switched the execution mode */
5921 ctx
->bstate
= BS_STOP
;
5924 gen_helper_mtc0_config4(cpu_env
, arg
);
5926 ctx
->bstate
= BS_STOP
;
5929 gen_helper_mtc0_config5(cpu_env
, arg
);
5931 /* Stop translation as we may have switched the execution mode */
5932 ctx
->bstate
= BS_STOP
;
5934 /* 6,7 are implementation dependent */
5944 rn
= "Invalid config selector";
5945 goto cp0_unimplemented
;
5951 gen_helper_mtc0_lladdr(cpu_env
, arg
);
5955 goto cp0_unimplemented
;
5961 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
5965 goto cp0_unimplemented
;
5971 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
5975 goto cp0_unimplemented
;
5981 #if defined(TARGET_MIPS64)
5982 check_insn(ctx
, ISA_MIPS3
);
5983 gen_helper_mtc0_xcontext(cpu_env
, arg
);
5988 goto cp0_unimplemented
;
5992 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5993 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5996 gen_helper_mtc0_framemask(cpu_env
, arg
);
6000 goto cp0_unimplemented
;
6005 rn
= "Diagnostic"; /* implementation dependent */
6010 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6011 /* BS_STOP isn't good enough here, hflags may have changed. */
6012 gen_save_pc(ctx
->pc
+ 4);
6013 ctx
->bstate
= BS_EXCP
;
6017 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6018 rn
= "TraceControl";
6019 /* Stop translation as we may have switched the execution mode */
6020 ctx
->bstate
= BS_STOP
;
6023 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6024 rn
= "TraceControl2";
6025 /* Stop translation as we may have switched the execution mode */
6026 ctx
->bstate
= BS_STOP
;
6029 /* Stop translation as we may have switched the execution mode */
6030 ctx
->bstate
= BS_STOP
;
6031 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6032 rn
= "UserTraceData";
6033 /* Stop translation as we may have switched the execution mode */
6034 ctx
->bstate
= BS_STOP
;
6037 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6038 /* Stop translation as we may have switched the execution mode */
6039 ctx
->bstate
= BS_STOP
;
6043 goto cp0_unimplemented
;
6050 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6054 goto cp0_unimplemented
;
6060 gen_helper_mtc0_performance0(cpu_env
, arg
);
6061 rn
= "Performance0";
6064 // gen_helper_mtc0_performance1(arg);
6065 rn
= "Performance1";
6068 // gen_helper_mtc0_performance2(arg);
6069 rn
= "Performance2";
6072 // gen_helper_mtc0_performance3(arg);
6073 rn
= "Performance3";
6076 // gen_helper_mtc0_performance4(arg);
6077 rn
= "Performance4";
6080 // gen_helper_mtc0_performance5(arg);
6081 rn
= "Performance5";
6084 // gen_helper_mtc0_performance6(arg);
6085 rn
= "Performance6";
6088 // gen_helper_mtc0_performance7(arg);
6089 rn
= "Performance7";
6092 goto cp0_unimplemented
;
6106 goto cp0_unimplemented
;
6115 gen_helper_mtc0_taglo(cpu_env
, arg
);
6122 gen_helper_mtc0_datalo(cpu_env
, arg
);
6126 goto cp0_unimplemented
;
6135 gen_helper_mtc0_taghi(cpu_env
, arg
);
6142 gen_helper_mtc0_datahi(cpu_env
, arg
);
6147 goto cp0_unimplemented
;
6153 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6157 goto cp0_unimplemented
;
6164 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6168 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6169 tcg_gen_st_tl(arg
, cpu_env
,
6170 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6174 goto cp0_unimplemented
;
6176 /* Stop translation as we may have switched the execution mode */
6177 ctx
->bstate
= BS_STOP
;
6180 goto cp0_unimplemented
;
6182 (void)rn
; /* avoid a compiler warning */
6183 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6184 /* For simplicity assume that all writes can cause interrupts. */
6185 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6187 ctx
->bstate
= BS_STOP
;
6192 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6195 #if defined(TARGET_MIPS64)
6196 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6198 const char *rn
= "invalid";
6201 check_insn(ctx
, ISA_MIPS64
);
6207 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6211 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6212 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6216 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6217 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6221 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6222 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6226 goto cp0_unimplemented
;
6232 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6233 gen_helper_mfc0_random(arg
, cpu_env
);
6237 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6238 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6242 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6243 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6247 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6248 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6252 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6253 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6257 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6258 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6262 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6263 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6264 rn
= "VPEScheFBack";
6267 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6268 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6272 goto cp0_unimplemented
;
6278 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6282 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6283 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6287 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6288 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6292 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6293 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6297 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6298 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6302 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6303 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6307 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6308 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6312 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6313 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6317 goto cp0_unimplemented
;
6323 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6327 goto cp0_unimplemented
;
6333 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6337 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6338 rn
= "ContextConfig";
6339 goto cp0_unimplemented
;
6342 CP0_CHECK(ctx
->ulri
);
6343 tcg_gen_ld_tl(arg
, cpu_env
,
6344 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6348 goto cp0_unimplemented
;
6354 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6358 check_insn(ctx
, ISA_MIPS32R2
);
6359 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6363 goto cp0_unimplemented
;
6369 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6373 check_insn(ctx
, ISA_MIPS32R2
);
6374 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6378 check_insn(ctx
, ISA_MIPS32R2
);
6379 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6383 check_insn(ctx
, ISA_MIPS32R2
);
6384 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6388 check_insn(ctx
, ISA_MIPS32R2
);
6389 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6393 check_insn(ctx
, ISA_MIPS32R2
);
6394 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6398 goto cp0_unimplemented
;
6404 check_insn(ctx
, ISA_MIPS32R2
);
6405 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6409 goto cp0_unimplemented
;
6415 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6420 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6425 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6429 goto cp0_unimplemented
;
6435 /* Mark as an IO operation because we read the time. */
6436 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6439 gen_helper_mfc0_count(arg
, cpu_env
);
6440 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6443 /* Break the TB to be able to take timer interrupts immediately
6444 after reading count. */
6445 ctx
->bstate
= BS_STOP
;
6448 /* 6,7 are implementation dependent */
6450 goto cp0_unimplemented
;
6456 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6460 goto cp0_unimplemented
;
6466 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6469 /* 6,7 are implementation dependent */
6471 goto cp0_unimplemented
;
6477 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6481 check_insn(ctx
, ISA_MIPS32R2
);
6482 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6486 check_insn(ctx
, ISA_MIPS32R2
);
6487 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6491 check_insn(ctx
, ISA_MIPS32R2
);
6492 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6496 goto cp0_unimplemented
;
6502 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6506 goto cp0_unimplemented
;
6512 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6516 goto cp0_unimplemented
;
6522 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6526 check_insn(ctx
, ISA_MIPS32R2
);
6527 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6531 goto cp0_unimplemented
;
6537 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6541 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6545 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6549 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6553 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6557 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6560 /* 6,7 are implementation dependent */
6562 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6566 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6570 goto cp0_unimplemented
;
6576 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6580 goto cp0_unimplemented
;
6586 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6590 goto cp0_unimplemented
;
6596 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6600 goto cp0_unimplemented
;
6606 check_insn(ctx
, ISA_MIPS3
);
6607 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6611 goto cp0_unimplemented
;
6615 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6616 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6619 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6623 goto cp0_unimplemented
;
6627 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6628 rn
= "'Diagnostic"; /* implementation dependent */
6633 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6637 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6638 rn
= "TraceControl";
6641 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6642 rn
= "TraceControl2";
6645 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6646 rn
= "UserTraceData";
6649 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6653 goto cp0_unimplemented
;
6660 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6664 goto cp0_unimplemented
;
6670 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6671 rn
= "Performance0";
6674 // gen_helper_dmfc0_performance1(arg);
6675 rn
= "Performance1";
6678 // gen_helper_dmfc0_performance2(arg);
6679 rn
= "Performance2";
6682 // gen_helper_dmfc0_performance3(arg);
6683 rn
= "Performance3";
6686 // gen_helper_dmfc0_performance4(arg);
6687 rn
= "Performance4";
6690 // gen_helper_dmfc0_performance5(arg);
6691 rn
= "Performance5";
6694 // gen_helper_dmfc0_performance6(arg);
6695 rn
= "Performance6";
6698 // gen_helper_dmfc0_performance7(arg);
6699 rn
= "Performance7";
6702 goto cp0_unimplemented
;
6706 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6713 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6717 goto cp0_unimplemented
;
6726 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6733 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6737 goto cp0_unimplemented
;
6746 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6753 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6757 goto cp0_unimplemented
;
6763 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6767 goto cp0_unimplemented
;
6774 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6778 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6779 tcg_gen_ld_tl(arg
, cpu_env
,
6780 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6784 goto cp0_unimplemented
;
6788 goto cp0_unimplemented
;
6790 (void)rn
; /* avoid a compiler warning */
6791 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6795 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6796 gen_mfc0_unimplemented(ctx
, arg
);
6799 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6801 const char *rn
= "invalid";
6804 check_insn(ctx
, ISA_MIPS64
);
6806 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6814 gen_helper_mtc0_index(cpu_env
, arg
);
6818 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6819 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6823 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6828 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6833 goto cp0_unimplemented
;
6843 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6844 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6848 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6849 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6853 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6854 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6858 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6859 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6863 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6864 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6868 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6869 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6870 rn
= "VPEScheFBack";
6873 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6874 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6878 goto cp0_unimplemented
;
6884 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
6888 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6889 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6893 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6894 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6898 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6899 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6903 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6904 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6908 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6909 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6913 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6914 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6918 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6919 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6923 goto cp0_unimplemented
;
6929 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
6933 goto cp0_unimplemented
;
6939 gen_helper_mtc0_context(cpu_env
, arg
);
6943 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
6944 rn
= "ContextConfig";
6945 goto cp0_unimplemented
;
6948 CP0_CHECK(ctx
->ulri
);
6949 tcg_gen_st_tl(arg
, cpu_env
,
6950 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6954 goto cp0_unimplemented
;
6960 gen_helper_mtc0_pagemask(cpu_env
, arg
);
6964 check_insn(ctx
, ISA_MIPS32R2
);
6965 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
6969 goto cp0_unimplemented
;
6975 gen_helper_mtc0_wired(cpu_env
, arg
);
6979 check_insn(ctx
, ISA_MIPS32R2
);
6980 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
6984 check_insn(ctx
, ISA_MIPS32R2
);
6985 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
6989 check_insn(ctx
, ISA_MIPS32R2
);
6990 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
6994 check_insn(ctx
, ISA_MIPS32R2
);
6995 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
6999 check_insn(ctx
, ISA_MIPS32R2
);
7000 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7004 goto cp0_unimplemented
;
7010 check_insn(ctx
, ISA_MIPS32R2
);
7011 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7012 ctx
->bstate
= BS_STOP
;
7016 goto cp0_unimplemented
;
7034 goto cp0_unimplemented
;
7040 gen_helper_mtc0_count(cpu_env
, arg
);
7043 /* 6,7 are implementation dependent */
7045 goto cp0_unimplemented
;
7047 /* Stop translation as we may have switched the execution mode */
7048 ctx
->bstate
= BS_STOP
;
7053 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7057 goto cp0_unimplemented
;
7063 gen_helper_mtc0_compare(cpu_env
, arg
);
7066 /* 6,7 are implementation dependent */
7068 goto cp0_unimplemented
;
7070 /* Stop translation as we may have switched the execution mode */
7071 ctx
->bstate
= BS_STOP
;
7076 save_cpu_state(ctx
, 1);
7077 gen_helper_mtc0_status(cpu_env
, arg
);
7078 /* BS_STOP isn't good enough here, hflags may have changed. */
7079 gen_save_pc(ctx
->pc
+ 4);
7080 ctx
->bstate
= BS_EXCP
;
7084 check_insn(ctx
, ISA_MIPS32R2
);
7085 gen_helper_mtc0_intctl(cpu_env
, arg
);
7086 /* Stop translation as we may have switched the execution mode */
7087 ctx
->bstate
= BS_STOP
;
7091 check_insn(ctx
, ISA_MIPS32R2
);
7092 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7093 /* Stop translation as we may have switched the execution mode */
7094 ctx
->bstate
= BS_STOP
;
7098 check_insn(ctx
, ISA_MIPS32R2
);
7099 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7100 /* Stop translation as we may have switched the execution mode */
7101 ctx
->bstate
= BS_STOP
;
7105 goto cp0_unimplemented
;
7111 save_cpu_state(ctx
, 1);
7112 /* Mark as an IO operation because we may trigger a software
7114 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7117 gen_helper_mtc0_cause(cpu_env
, arg
);
7118 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7121 /* Stop translation as we may have triggered an intetrupt */
7122 ctx
->bstate
= BS_STOP
;
7126 goto cp0_unimplemented
;
7132 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7136 goto cp0_unimplemented
;
7146 check_insn(ctx
, ISA_MIPS32R2
);
7147 gen_helper_mtc0_ebase(cpu_env
, arg
);
7151 goto cp0_unimplemented
;
7157 gen_helper_mtc0_config0(cpu_env
, arg
);
7159 /* Stop translation as we may have switched the execution mode */
7160 ctx
->bstate
= BS_STOP
;
7163 /* ignored, read only */
7167 gen_helper_mtc0_config2(cpu_env
, arg
);
7169 /* Stop translation as we may have switched the execution mode */
7170 ctx
->bstate
= BS_STOP
;
7173 gen_helper_mtc0_config3(cpu_env
, arg
);
7175 /* Stop translation as we may have switched the execution mode */
7176 ctx
->bstate
= BS_STOP
;
7179 /* currently ignored */
7183 gen_helper_mtc0_config5(cpu_env
, arg
);
7185 /* Stop translation as we may have switched the execution mode */
7186 ctx
->bstate
= BS_STOP
;
7188 /* 6,7 are implementation dependent */
7190 rn
= "Invalid config selector";
7191 goto cp0_unimplemented
;
7197 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7201 goto cp0_unimplemented
;
7207 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7211 goto cp0_unimplemented
;
7217 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7221 goto cp0_unimplemented
;
7227 check_insn(ctx
, ISA_MIPS3
);
7228 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7232 goto cp0_unimplemented
;
7236 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7237 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7240 gen_helper_mtc0_framemask(cpu_env
, arg
);
7244 goto cp0_unimplemented
;
7249 rn
= "Diagnostic"; /* implementation dependent */
7254 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7255 /* BS_STOP isn't good enough here, hflags may have changed. */
7256 gen_save_pc(ctx
->pc
+ 4);
7257 ctx
->bstate
= BS_EXCP
;
7261 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7262 /* Stop translation as we may have switched the execution mode */
7263 ctx
->bstate
= BS_STOP
;
7264 rn
= "TraceControl";
7267 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7268 /* Stop translation as we may have switched the execution mode */
7269 ctx
->bstate
= BS_STOP
;
7270 rn
= "TraceControl2";
7273 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7274 /* Stop translation as we may have switched the execution mode */
7275 ctx
->bstate
= BS_STOP
;
7276 rn
= "UserTraceData";
7279 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7280 /* Stop translation as we may have switched the execution mode */
7281 ctx
->bstate
= BS_STOP
;
7285 goto cp0_unimplemented
;
7292 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7296 goto cp0_unimplemented
;
7302 gen_helper_mtc0_performance0(cpu_env
, arg
);
7303 rn
= "Performance0";
7306 // gen_helper_mtc0_performance1(cpu_env, arg);
7307 rn
= "Performance1";
7310 // gen_helper_mtc0_performance2(cpu_env, arg);
7311 rn
= "Performance2";
7314 // gen_helper_mtc0_performance3(cpu_env, arg);
7315 rn
= "Performance3";
7318 // gen_helper_mtc0_performance4(cpu_env, arg);
7319 rn
= "Performance4";
7322 // gen_helper_mtc0_performance5(cpu_env, arg);
7323 rn
= "Performance5";
7326 // gen_helper_mtc0_performance6(cpu_env, arg);
7327 rn
= "Performance6";
7330 // gen_helper_mtc0_performance7(cpu_env, arg);
7331 rn
= "Performance7";
7334 goto cp0_unimplemented
;
7348 goto cp0_unimplemented
;
7357 gen_helper_mtc0_taglo(cpu_env
, arg
);
7364 gen_helper_mtc0_datalo(cpu_env
, arg
);
7368 goto cp0_unimplemented
;
7377 gen_helper_mtc0_taghi(cpu_env
, arg
);
7384 gen_helper_mtc0_datahi(cpu_env
, arg
);
7389 goto cp0_unimplemented
;
7395 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7399 goto cp0_unimplemented
;
7406 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7410 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7411 tcg_gen_st_tl(arg
, cpu_env
,
7412 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7416 goto cp0_unimplemented
;
7418 /* Stop translation as we may have switched the execution mode */
7419 ctx
->bstate
= BS_STOP
;
7422 goto cp0_unimplemented
;
7424 (void)rn
; /* avoid a compiler warning */
7425 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7426 /* For simplicity assume that all writes can cause interrupts. */
7427 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7429 ctx
->bstate
= BS_STOP
;
7434 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7436 #endif /* TARGET_MIPS64 */
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
{
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
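    /* MFTR reads from another thread context (TC).  If the target TC lives on
       a different VPE, or its number exceeds what MVPConf0 provisions, the
       read returns all ones. */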
7456 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7459 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7469 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7472 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7475 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7478 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7481 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7484 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7487 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7490 gen_mfc0(ctx
, t0
, rt
, sel
);
7497 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7500 gen_mfc0(ctx
, t0
, rt
, sel
);
7506 gen_helper_mftc0_status(t0
, cpu_env
);
7509 gen_mfc0(ctx
, t0
, rt
, sel
);
7515 gen_helper_mftc0_cause(t0
, cpu_env
);
7525 gen_helper_mftc0_epc(t0
, cpu_env
);
7535 gen_helper_mftc0_ebase(t0
, cpu_env
);
7545 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7555 gen_helper_mftc0_debug(t0
, cpu_env
);
7558 gen_mfc0(ctx
, t0
, rt
, sel
);
7563 gen_mfc0(ctx
, t0
, rt
, sel
);
7565 } else switch (sel
) {
7566 /* GPR registers. */
7568 gen_helper_1e0i(mftgpr
, t0
, rt
);
7570 /* Auxiliary CPU registers */
7574 gen_helper_1e0i(mftlo
, t0
, 0);
7577 gen_helper_1e0i(mfthi
, t0
, 0);
7580 gen_helper_1e0i(mftacx
, t0
, 0);
7583 gen_helper_1e0i(mftlo
, t0
, 1);
7586 gen_helper_1e0i(mfthi
, t0
, 1);
7589 gen_helper_1e0i(mftacx
, t0
, 1);
7592 gen_helper_1e0i(mftlo
, t0
, 2);
7595 gen_helper_1e0i(mfthi
, t0
, 2);
7598 gen_helper_1e0i(mftacx
, t0
, 2);
7601 gen_helper_1e0i(mftlo
, t0
, 3);
7604 gen_helper_1e0i(mfthi
, t0
, 3);
7607 gen_helper_1e0i(mftacx
, t0
, 3);
7610 gen_helper_mftdsp(t0
, cpu_env
);
7616 /* Floating point (COP1). */
7618 /* XXX: For now we support only a single FPU context. */
7620 TCGv_i32 fp0
= tcg_temp_new_i32();
7622 gen_load_fpr32(ctx
, fp0
, rt
);
7623 tcg_gen_ext_i32_tl(t0
, fp0
);
7624 tcg_temp_free_i32(fp0
);
7626 TCGv_i32 fp0
= tcg_temp_new_i32();
7628 gen_load_fpr32h(ctx
, fp0
, rt
);
7629 tcg_gen_ext_i32_tl(t0
, fp0
);
7630 tcg_temp_free_i32(fp0
);
7634 /* XXX: For now we support only a single FPU context. */
7635 gen_helper_1e0i(cfc1
, t0
, rt
);
7637 /* COP2: Not implemented. */
7644 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7645 gen_store_gpr(t0
, rd
);
7651 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7652 generate_exception_end(ctx
, EXCP_RI
);
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
{
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))

    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
            gen_helper_mttc0_vpecontrol(cpu_env, t0);
            gen_helper_mttc0_vpeconf0(cpu_env, t0);
            gen_helper_mttc0_tcstatus(cpu_env, t0);
            gen_helper_mttc0_tcbind(cpu_env, t0);
            gen_helper_mttc0_tcrestart(cpu_env, t0);
            gen_helper_mttc0_tchalt(cpu_env, t0);
            gen_helper_mttc0_tccontext(cpu_env, t0);
            gen_helper_mttc0_tcschedule(cpu_env, t0);
            gen_helper_mttc0_tcschefback(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_entryhi(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_status(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_cause(cpu_env, t0);
            gen_helper_mttc0_ebase(cpu_env, t0);
            gen_helper_mttc0_debug(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_mtc0(ctx, t0, rd, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_0e1i(mttgpr, t0, rd);
    /* Auxiliary CPU registers */
        gen_helper_0e1i(mttlo, t0, 0);
        gen_helper_0e1i(mtthi, t0, 0);
        gen_helper_0e1i(mttacx, t0, 0);
        gen_helper_0e1i(mttlo, t0, 1);
        gen_helper_0e1i(mtthi, t0, 1);
        gen_helper_0e1i(mttacx, t0, 1);
        gen_helper_0e1i(mttlo, t0, 2);
        gen_helper_0e1i(mtthi, t0, 2);
        gen_helper_0e1i(mttacx, t0, 2);
        gen_helper_0e1i(mttlo, t0, 3);
        gen_helper_0e1i(mtthi, t0, 3);
        gen_helper_0e1i(mttacx, t0, 3);
        gen_helper_mttdsp(cpu_env, t0);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fs_tmp = tcg_const_i32(rd);
            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
    /* COP2: Not implemented. */
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
    const char *opn = "ldst";

    check_cp0_enabled(ctx);
        gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
            TCGv t0 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        check_insn(ctx, ISA_MIPS3);
            TCGv t0 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
        gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
            TCGv t0 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
        check_insn(ctx, ASE_MT);
        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        check_insn(ctx, ASE_MT);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        if (!env->tlb->helper_tlbwi)
            goto die;
        gen_helper_tlbwi(cpu_env);
        if (!env->tlb->helper_tlbinv) {
            goto die;
        }
        gen_helper_tlbinv(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbinvf) {
            goto die;
        }
        gen_helper_tlbinvf(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbwr)
            goto die;
        gen_helper_tlbwr(cpu_env);
        if (!env->tlb->helper_tlbp)
            goto die;
        gen_helper_tlbp(cpu_env);
        if (!env->tlb->helper_tlbr)
            goto die;
        gen_helper_tlbr(cpu_env);
    case OPC_ERET: /* OPC_ERETNC */
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
            if (ctx->opcode & (1 << bit_shift)) {
                check_insn(ctx, ISA_MIPS32R5);
                gen_helper_eretnc(cpu_env);
                check_insn(ctx, ISA_MIPS2);
                gen_helper_eret(cpu_env);
            ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            if (!(ctx->hflags & MIPS_HFLAG_DM)) {
                generate_exception_end(ctx, EXCP_RI);
                gen_helper_deret(cpu_env);
                ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        /* If we get an exception, we want to restart at next instruction */
        save_cpu_state(ctx, 1);
        gen_helper_wait(cpu_env);
        ctx->bstate = BS_EXCP;
        generate_exception_end(ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
#endif /* !CONFIG_USER_ONLY */
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();

    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
    check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
    btarget = ctx->pc + 4 + offset;
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BL;
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i32(t0);
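/*
 * For reference: the fpu_fcr31 shifts above assume the standard MIPS
 * FCR31 condition-code layout, with FCC0 at bit 23 and FCC1..FCC7 at
 * bits 25..31 (so get_fp_bit(cc) is expected to yield 23 for cc == 0
 * and 24 + cc otherwise).  A worked example under that assumption:
 *
 *   cc == 0  ->  bit 23  (mask 0x00800000)
 *   cc == 2  ->  bit 26  (mask 0x04000000)
 *
 * Shifting FCR31 right by that amount and masking with 1 leaves just
 * the selected condition bit in t0, which then feeds bcond.
 */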
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
        generate_exception_end(ctx, EXCP_RI);
    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);
    btarget = addr_add(ctx, ctx->pc + 4, offset);
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    tcg_gen_trunc_i64_tl(bcond, t0);
    ctx->btarget = btarget;
    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i64(t0);
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))

    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_SEL_S = FOP(16, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_SELEQZ_S = FOP(20, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_SELNEZ_S = FOP(23, FMT_S),
    OPC_MADDF_S = FOP(24, FMT_S),
    OPC_MSUBF_S = FOP(25, FMT_S),
    OPC_RINT_S = FOP(26, FMT_S),
    OPC_CLASS_S = FOP(27, FMT_S),
    OPC_MIN_S = FOP(28, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_MINA_S = FOP(29, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_MAX_S = FOP(30, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_MAXA_S = FOP(31, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP(48, FMT_S),
    OPC_CMP_UN_S = FOP(49, FMT_S),
    OPC_CMP_EQ_S = FOP(50, FMT_S),
    OPC_CMP_UEQ_S = FOP(51, FMT_S),
    OPC_CMP_OLT_S = FOP(52, FMT_S),
    OPC_CMP_ULT_S = FOP(53, FMT_S),
    OPC_CMP_OLE_S = FOP(54, FMT_S),
    OPC_CMP_ULE_S = FOP(55, FMT_S),
    OPC_CMP_SF_S = FOP(56, FMT_S),
    OPC_CMP_NGLE_S = FOP(57, FMT_S),
    OPC_CMP_SEQ_S = FOP(58, FMT_S),
    OPC_CMP_NGL_S = FOP(59, FMT_S),
    OPC_CMP_LT_S = FOP(60, FMT_S),
    OPC_CMP_NGE_S = FOP(61, FMT_S),
    OPC_CMP_LE_S = FOP(62, FMT_S),
    OPC_CMP_NGT_S = FOP(63, FMT_S),

    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_SEL_D = FOP(16, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_SELEQZ_D = FOP(20, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_SELNEZ_D = FOP(23, FMT_D),
    OPC_MADDF_D = FOP(24, FMT_D),
    OPC_MSUBF_D = FOP(25, FMT_D),
    OPC_RINT_D = FOP(26, FMT_D),
    OPC_CLASS_D = FOP(27, FMT_D),
    OPC_MIN_D = FOP(28, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_MINA_D = FOP(29, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_MAX_D = FOP(30, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_MAXA_D = FOP(31, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP(48, FMT_D),
    OPC_CMP_UN_D = FOP(49, FMT_D),
    OPC_CMP_EQ_D = FOP(50, FMT_D),
    OPC_CMP_UEQ_D = FOP(51, FMT_D),
    OPC_CMP_OLT_D = FOP(52, FMT_D),
    OPC_CMP_ULT_D = FOP(53, FMT_D),
    OPC_CMP_OLE_D = FOP(54, FMT_D),
    OPC_CMP_ULE_D = FOP(55, FMT_D),
    OPC_CMP_SF_D = FOP(56, FMT_D),
    OPC_CMP_NGLE_D = FOP(57, FMT_D),
    OPC_CMP_SEQ_D = FOP(58, FMT_D),
    OPC_CMP_NGL_D = FOP(59, FMT_D),
    OPC_CMP_LT_D = FOP(60, FMT_D),
    OPC_CMP_NGE_D = FOP(61, FMT_D),
    OPC_CMP_LE_D = FOP(62, FMT_D),
    OPC_CMP_NGT_D = FOP(63, FMT_D),

    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),

    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP(48, FMT_PS),
    OPC_CMP_UN_PS = FOP(49, FMT_PS),
    OPC_CMP_EQ_PS = FOP(50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP(51, FMT_PS),
    OPC_CMP_OLT_PS = FOP(52, FMT_PS),
    OPC_CMP_ULT_PS = FOP(53, FMT_PS),
    OPC_CMP_OLE_PS = FOP(54, FMT_PS),
    OPC_CMP_ULE_PS = FOP(55, FMT_PS),
    OPC_CMP_SF_PS = FOP(56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP(57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP(58, FMT_PS),
    OPC_CMP_NGL_PS = FOP(59, FMT_PS),
    OPC_CMP_LT_PS = FOP(60, FMT_PS),
    OPC_CMP_NGE_PS = FOP(61, FMT_PS),
    OPC_CMP_LE_PS = FOP(62, FMT_PS),
    OPC_CMP_NGT_PS = FOP(63, FMT_PS),

    R6_OPC_CMP_AF_S = FOP(0, FMT_W),
    R6_OPC_CMP_UN_S = FOP(1, FMT_W),
    R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
    R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
    R6_OPC_CMP_LT_S = FOP(4, FMT_W),
    R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
    R6_OPC_CMP_LE_S = FOP(6, FMT_W),
    R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
    R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
    R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
    R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
    R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
    R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
    R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
    R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
    R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
    R6_OPC_CMP_OR_S = FOP(17, FMT_W),
    R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
    R6_OPC_CMP_NE_S = FOP(19, FMT_W),
    R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
    R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
    R6_OPC_CMP_SNE_S = FOP(27, FMT_W),

    R6_OPC_CMP_AF_D = FOP(0, FMT_L),
    R6_OPC_CMP_UN_D = FOP(1, FMT_L),
    R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
    R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
    R6_OPC_CMP_LT_D = FOP(4, FMT_L),
    R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
    R6_OPC_CMP_LE_D = FOP(6, FMT_L),
    R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
    R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
    R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
    R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
    R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
    R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
    R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
    R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
    R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
    R6_OPC_CMP_OR_D = FOP(17, FMT_L),
    R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
    R6_OPC_CMP_NE_D = FOP(19, FMT_L),
    R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
    R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
    R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
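/*
 * For reference, FOP() packs the CP1 function field into bits 5..0 and
 * the fmt field into bits 25..21 of the masked opcode.  Assuming the
 * usual MIPS fmt encodings (FMT_S = 16, FMT_D = 17, FMT_W = 20,
 * FMT_L = 21, FMT_PS = 22), a couple of the values above decode as:
 *
 *   OPC_ADD_S   = FOP(0, FMT_S)  = (16 << 21) | 0  = 0x02000000
 *   OPC_ADD_D   = FOP(0, FMT_D)  = (17 << 21) | 0  = 0x02200000
 *   OPC_CVT_D_W = FOP(33, FMT_W) = (20 << 21) | 33 = 0x02800021
 */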
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    TCGv t0 = tcg_temp_new();
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);
            TCGv_i32 fs_tmp = tcg_const_i32(fs);
            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
#if defined(TARGET_MIPS64)
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32h(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
                               int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_temp_free_i32(t0);
static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
                                int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);
        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);
static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);
        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
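/*
 * A short summary of what the movcond sequences above implement,
 * assuming the R6 semantics of SEL.fmt/SELEQZ.fmt/SELNEZ.fmt: the
 * select is driven by bit 0 of the condition operand.  In pseudo-C:
 *
 *   SEL:    fd = (fd & 1) ? ft : fs;
 *   SELEQZ: fd = (ft & 1) ? 0  : fs;
 *   SELNEZ: fd = (ft & 1) ? fs : 0;
 */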
static void gen_farith (DisasContext *ctx, enum fopcode op1,
                        int ft, int fs, int fd, int cc)
    uint32_t func = ctx->opcode & 0x3f;
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_helper_float_add_s(fp0, cpu_env, fp0, fp1);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
8688 TCGv_i32 fp0
= tcg_temp_new_i32();
8689 TCGv_i32 fp1
= tcg_temp_new_i32();
8691 gen_load_fpr32(ctx
, fp0
, fs
);
8692 gen_load_fpr32(ctx
, fp1
, ft
);
8693 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8694 tcg_temp_free_i32(fp1
);
8695 gen_store_fpr32(ctx
, fp0
, fd
);
8696 tcg_temp_free_i32(fp0
);
8701 TCGv_i32 fp0
= tcg_temp_new_i32();
8702 TCGv_i32 fp1
= tcg_temp_new_i32();
8704 gen_load_fpr32(ctx
, fp0
, fs
);
8705 gen_load_fpr32(ctx
, fp1
, ft
);
8706 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8707 tcg_temp_free_i32(fp1
);
8708 gen_store_fpr32(ctx
, fp0
, fd
);
8709 tcg_temp_free_i32(fp0
);
8714 TCGv_i32 fp0
= tcg_temp_new_i32();
8715 TCGv_i32 fp1
= tcg_temp_new_i32();
8717 gen_load_fpr32(ctx
, fp0
, fs
);
8718 gen_load_fpr32(ctx
, fp1
, ft
);
8719 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8720 tcg_temp_free_i32(fp1
);
8721 gen_store_fpr32(ctx
, fp0
, fd
);
8722 tcg_temp_free_i32(fp0
);
8727 TCGv_i32 fp0
= tcg_temp_new_i32();
8729 gen_load_fpr32(ctx
, fp0
, fs
);
8730 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8731 gen_store_fpr32(ctx
, fp0
, fd
);
8732 tcg_temp_free_i32(fp0
);
8737 TCGv_i32 fp0
= tcg_temp_new_i32();
8739 gen_load_fpr32(ctx
, fp0
, fs
);
8740 gen_helper_float_abs_s(fp0
, fp0
);
8741 gen_store_fpr32(ctx
, fp0
, fd
);
8742 tcg_temp_free_i32(fp0
);
8747 TCGv_i32 fp0
= tcg_temp_new_i32();
8749 gen_load_fpr32(ctx
, fp0
, fs
);
8750 gen_store_fpr32(ctx
, fp0
, fd
);
8751 tcg_temp_free_i32(fp0
);
8756 TCGv_i32 fp0
= tcg_temp_new_i32();
8758 gen_load_fpr32(ctx
, fp0
, fs
);
8759 gen_helper_float_chs_s(fp0
, fp0
);
8760 gen_store_fpr32(ctx
, fp0
, fd
);
8761 tcg_temp_free_i32(fp0
);
8765 check_cp1_64bitmode(ctx
);
8767 TCGv_i32 fp32
= tcg_temp_new_i32();
8768 TCGv_i64 fp64
= tcg_temp_new_i64();
8770 gen_load_fpr32(ctx
, fp32
, fs
);
8771 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
8772 tcg_temp_free_i32(fp32
);
8773 gen_store_fpr64(ctx
, fp64
, fd
);
8774 tcg_temp_free_i64(fp64
);
8778 check_cp1_64bitmode(ctx
);
8780 TCGv_i32 fp32
= tcg_temp_new_i32();
8781 TCGv_i64 fp64
= tcg_temp_new_i64();
8783 gen_load_fpr32(ctx
, fp32
, fs
);
8784 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
8785 tcg_temp_free_i32(fp32
);
8786 gen_store_fpr64(ctx
, fp64
, fd
);
8787 tcg_temp_free_i64(fp64
);
8791 check_cp1_64bitmode(ctx
);
8793 TCGv_i32 fp32
= tcg_temp_new_i32();
8794 TCGv_i64 fp64
= tcg_temp_new_i64();
8796 gen_load_fpr32(ctx
, fp32
, fs
);
8797 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
8798 tcg_temp_free_i32(fp32
);
8799 gen_store_fpr64(ctx
, fp64
, fd
);
8800 tcg_temp_free_i64(fp64
);
8804 check_cp1_64bitmode(ctx
);
8806 TCGv_i32 fp32
= tcg_temp_new_i32();
8807 TCGv_i64 fp64
= tcg_temp_new_i64();
8809 gen_load_fpr32(ctx
, fp32
, fs
);
8810 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
8811 tcg_temp_free_i32(fp32
);
8812 gen_store_fpr64(ctx
, fp64
, fd
);
8813 tcg_temp_free_i64(fp64
);
8818 TCGv_i32 fp0
= tcg_temp_new_i32();
8820 gen_load_fpr32(ctx
, fp0
, fs
);
8821 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
8822 gen_store_fpr32(ctx
, fp0
, fd
);
8823 tcg_temp_free_i32(fp0
);
8828 TCGv_i32 fp0
= tcg_temp_new_i32();
8830 gen_load_fpr32(ctx
, fp0
, fs
);
8831 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
8832 gen_store_fpr32(ctx
, fp0
, fd
);
8833 tcg_temp_free_i32(fp0
);
8838 TCGv_i32 fp0
= tcg_temp_new_i32();
8840 gen_load_fpr32(ctx
, fp0
, fs
);
8841 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
8842 gen_store_fpr32(ctx
, fp0
, fd
);
8843 tcg_temp_free_i32(fp0
);
8848 TCGv_i32 fp0
= tcg_temp_new_i32();
8850 gen_load_fpr32(ctx
, fp0
, fs
);
8851 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
8852 gen_store_fpr32(ctx
, fp0
, fd
);
8853 tcg_temp_free_i32(fp0
);
8857 check_insn(ctx
, ISA_MIPS32R6
);
8858 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8861 check_insn(ctx
, ISA_MIPS32R6
);
8862 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8865 check_insn(ctx
, ISA_MIPS32R6
);
8866 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8869 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8870 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
8873 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8875 TCGLabel
*l1
= gen_new_label();
8879 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
8881 fp0
= tcg_temp_new_i32();
8882 gen_load_fpr32(ctx
, fp0
, fs
);
8883 gen_store_fpr32(ctx
, fp0
, fd
);
8884 tcg_temp_free_i32(fp0
);
8889 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8891 TCGLabel
*l1
= gen_new_label();
8895 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
8896 fp0
= tcg_temp_new_i32();
8897 gen_load_fpr32(ctx
, fp0
, fs
);
8898 gen_store_fpr32(ctx
, fp0
, fd
);
8899 tcg_temp_free_i32(fp0
);
8906 TCGv_i32 fp0
= tcg_temp_new_i32();
8908 gen_load_fpr32(ctx
, fp0
, fs
);
8909 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
8910 gen_store_fpr32(ctx
, fp0
, fd
);
8911 tcg_temp_free_i32(fp0
);
8916 TCGv_i32 fp0
= tcg_temp_new_i32();
8918 gen_load_fpr32(ctx
, fp0
, fs
);
8919 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
8920 gen_store_fpr32(ctx
, fp0
, fd
);
8921 tcg_temp_free_i32(fp0
);
8925 check_insn(ctx
, ISA_MIPS32R6
);
8927 TCGv_i32 fp0
= tcg_temp_new_i32();
8928 TCGv_i32 fp1
= tcg_temp_new_i32();
8929 TCGv_i32 fp2
= tcg_temp_new_i32();
8930 gen_load_fpr32(ctx
, fp0
, fs
);
8931 gen_load_fpr32(ctx
, fp1
, ft
);
8932 gen_load_fpr32(ctx
, fp2
, fd
);
8933 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8934 gen_store_fpr32(ctx
, fp2
, fd
);
8935 tcg_temp_free_i32(fp2
);
8936 tcg_temp_free_i32(fp1
);
8937 tcg_temp_free_i32(fp0
);
8941 check_insn(ctx
, ISA_MIPS32R6
);
8943 TCGv_i32 fp0
= tcg_temp_new_i32();
8944 TCGv_i32 fp1
= tcg_temp_new_i32();
8945 TCGv_i32 fp2
= tcg_temp_new_i32();
8946 gen_load_fpr32(ctx
, fp0
, fs
);
8947 gen_load_fpr32(ctx
, fp1
, ft
);
8948 gen_load_fpr32(ctx
, fp2
, fd
);
8949 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8950 gen_store_fpr32(ctx
, fp2
, fd
);
8951 tcg_temp_free_i32(fp2
);
8952 tcg_temp_free_i32(fp1
);
8953 tcg_temp_free_i32(fp0
);
8957 check_insn(ctx
, ISA_MIPS32R6
);
8959 TCGv_i32 fp0
= tcg_temp_new_i32();
8960 gen_load_fpr32(ctx
, fp0
, fs
);
8961 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
8962 gen_store_fpr32(ctx
, fp0
, fd
);
8963 tcg_temp_free_i32(fp0
);
8967 check_insn(ctx
, ISA_MIPS32R6
);
8969 TCGv_i32 fp0
= tcg_temp_new_i32();
8970 gen_load_fpr32(ctx
, fp0
, fs
);
8971 gen_helper_float_class_s(fp0
, fp0
);
8972 gen_store_fpr32(ctx
, fp0
, fd
);
8973 tcg_temp_free_i32(fp0
);
8976 case OPC_MIN_S
: /* OPC_RECIP2_S */
8977 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
8979 TCGv_i32 fp0
= tcg_temp_new_i32();
8980 TCGv_i32 fp1
= tcg_temp_new_i32();
8981 TCGv_i32 fp2
= tcg_temp_new_i32();
8982 gen_load_fpr32(ctx
, fp0
, fs
);
8983 gen_load_fpr32(ctx
, fp1
, ft
);
8984 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
8985 gen_store_fpr32(ctx
, fp2
, fd
);
8986 tcg_temp_free_i32(fp2
);
8987 tcg_temp_free_i32(fp1
);
8988 tcg_temp_free_i32(fp0
);
8991 check_cp1_64bitmode(ctx
);
8993 TCGv_i32 fp0
= tcg_temp_new_i32();
8994 TCGv_i32 fp1
= tcg_temp_new_i32();
8996 gen_load_fpr32(ctx
, fp0
, fs
);
8997 gen_load_fpr32(ctx
, fp1
, ft
);
8998 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
8999 tcg_temp_free_i32(fp1
);
9000 gen_store_fpr32(ctx
, fp0
, fd
);
9001 tcg_temp_free_i32(fp0
);
9005 case OPC_MINA_S
: /* OPC_RECIP1_S */
9006 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9008 TCGv_i32 fp0
= tcg_temp_new_i32();
9009 TCGv_i32 fp1
= tcg_temp_new_i32();
9010 TCGv_i32 fp2
= tcg_temp_new_i32();
9011 gen_load_fpr32(ctx
, fp0
, fs
);
9012 gen_load_fpr32(ctx
, fp1
, ft
);
9013 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9014 gen_store_fpr32(ctx
, fp2
, fd
);
9015 tcg_temp_free_i32(fp2
);
9016 tcg_temp_free_i32(fp1
);
9017 tcg_temp_free_i32(fp0
);
9020 check_cp1_64bitmode(ctx
);
9022 TCGv_i32 fp0
= tcg_temp_new_i32();
9024 gen_load_fpr32(ctx
, fp0
, fs
);
9025 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9026 gen_store_fpr32(ctx
, fp0
, fd
);
9027 tcg_temp_free_i32(fp0
);
9031 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9032 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9034 TCGv_i32 fp0
= tcg_temp_new_i32();
9035 TCGv_i32 fp1
= tcg_temp_new_i32();
9036 gen_load_fpr32(ctx
, fp0
, fs
);
9037 gen_load_fpr32(ctx
, fp1
, ft
);
9038 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9039 gen_store_fpr32(ctx
, fp1
, fd
);
9040 tcg_temp_free_i32(fp1
);
9041 tcg_temp_free_i32(fp0
);
9044 check_cp1_64bitmode(ctx
);
9046 TCGv_i32 fp0
= tcg_temp_new_i32();
9048 gen_load_fpr32(ctx
, fp0
, fs
);
9049 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9050 gen_store_fpr32(ctx
, fp0
, fd
);
9051 tcg_temp_free_i32(fp0
);
9055 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9056 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9058 TCGv_i32 fp0
= tcg_temp_new_i32();
9059 TCGv_i32 fp1
= tcg_temp_new_i32();
9060 gen_load_fpr32(ctx
, fp0
, fs
);
9061 gen_load_fpr32(ctx
, fp1
, ft
);
9062 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9063 gen_store_fpr32(ctx
, fp1
, fd
);
9064 tcg_temp_free_i32(fp1
);
9065 tcg_temp_free_i32(fp0
);
9068 check_cp1_64bitmode(ctx
);
9070 TCGv_i32 fp0
= tcg_temp_new_i32();
9071 TCGv_i32 fp1
= tcg_temp_new_i32();
9073 gen_load_fpr32(ctx
, fp0
, fs
);
9074 gen_load_fpr32(ctx
, fp1
, ft
);
9075 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9076 tcg_temp_free_i32(fp1
);
9077 gen_store_fpr32(ctx
, fp0
, fd
);
9078 tcg_temp_free_i32(fp0
);
9083 check_cp1_registers(ctx
, fd
);
9085 TCGv_i32 fp32
= tcg_temp_new_i32();
9086 TCGv_i64 fp64
= tcg_temp_new_i64();
9088 gen_load_fpr32(ctx
, fp32
, fs
);
9089 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9090 tcg_temp_free_i32(fp32
);
9091 gen_store_fpr64(ctx
, fp64
, fd
);
9092 tcg_temp_free_i64(fp64
);
9097 TCGv_i32 fp0
= tcg_temp_new_i32();
9099 gen_load_fpr32(ctx
, fp0
, fs
);
9100 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
9101 gen_store_fpr32(ctx
, fp0
, fd
);
9102 tcg_temp_free_i32(fp0
);
9106 check_cp1_64bitmode(ctx
);
9108 TCGv_i32 fp32
= tcg_temp_new_i32();
9109 TCGv_i64 fp64
= tcg_temp_new_i64();
9111 gen_load_fpr32(ctx
, fp32
, fs
);
9112 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
9113 tcg_temp_free_i32(fp32
);
9114 gen_store_fpr64(ctx
, fp64
, fd
);
9115 tcg_temp_free_i64(fp64
);
9121 TCGv_i64 fp64
= tcg_temp_new_i64();
9122 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9123 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9125 gen_load_fpr32(ctx
, fp32_0
, fs
);
9126 gen_load_fpr32(ctx
, fp32_1
, ft
);
9127 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9128 tcg_temp_free_i32(fp32_1
);
9129 tcg_temp_free_i32(fp32_0
);
9130 gen_store_fpr64(ctx
, fp64
, fd
);
9131 tcg_temp_free_i64(fp64
);
9143 case OPC_CMP_NGLE_S
:
9150 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9151 if (ctx
->opcode
& (1 << 6)) {
9152 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9154 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9158 check_cp1_registers(ctx
, fs
| ft
| fd
);
9160 TCGv_i64 fp0
= tcg_temp_new_i64();
9161 TCGv_i64 fp1
= tcg_temp_new_i64();
9163 gen_load_fpr64(ctx
, fp0
, fs
);
9164 gen_load_fpr64(ctx
, fp1
, ft
);
9165 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9166 tcg_temp_free_i64(fp1
);
9167 gen_store_fpr64(ctx
, fp0
, fd
);
9168 tcg_temp_free_i64(fp0
);
9172 check_cp1_registers(ctx
, fs
| ft
| fd
);
9174 TCGv_i64 fp0
= tcg_temp_new_i64();
9175 TCGv_i64 fp1
= tcg_temp_new_i64();
9177 gen_load_fpr64(ctx
, fp0
, fs
);
9178 gen_load_fpr64(ctx
, fp1
, ft
);
9179 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9180 tcg_temp_free_i64(fp1
);
9181 gen_store_fpr64(ctx
, fp0
, fd
);
9182 tcg_temp_free_i64(fp0
);
9186 check_cp1_registers(ctx
, fs
| ft
| fd
);
9188 TCGv_i64 fp0
= tcg_temp_new_i64();
9189 TCGv_i64 fp1
= tcg_temp_new_i64();
9191 gen_load_fpr64(ctx
, fp0
, fs
);
9192 gen_load_fpr64(ctx
, fp1
, ft
);
9193 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9194 tcg_temp_free_i64(fp1
);
9195 gen_store_fpr64(ctx
, fp0
, fd
);
9196 tcg_temp_free_i64(fp0
);
9200 check_cp1_registers(ctx
, fs
| ft
| fd
);
9202 TCGv_i64 fp0
= tcg_temp_new_i64();
9203 TCGv_i64 fp1
= tcg_temp_new_i64();
9205 gen_load_fpr64(ctx
, fp0
, fs
);
9206 gen_load_fpr64(ctx
, fp1
, ft
);
9207 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9208 tcg_temp_free_i64(fp1
);
9209 gen_store_fpr64(ctx
, fp0
, fd
);
9210 tcg_temp_free_i64(fp0
);
9214 check_cp1_registers(ctx
, fs
| fd
);
9216 TCGv_i64 fp0
= tcg_temp_new_i64();
9218 gen_load_fpr64(ctx
, fp0
, fs
);
9219 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9220 gen_store_fpr64(ctx
, fp0
, fd
);
9221 tcg_temp_free_i64(fp0
);
9225 check_cp1_registers(ctx
, fs
| fd
);
9227 TCGv_i64 fp0
= tcg_temp_new_i64();
9229 gen_load_fpr64(ctx
, fp0
, fs
);
9230 gen_helper_float_abs_d(fp0
, fp0
);
9231 gen_store_fpr64(ctx
, fp0
, fd
);
9232 tcg_temp_free_i64(fp0
);
9236 check_cp1_registers(ctx
, fs
| fd
);
9238 TCGv_i64 fp0
= tcg_temp_new_i64();
9240 gen_load_fpr64(ctx
, fp0
, fs
);
9241 gen_store_fpr64(ctx
, fp0
, fd
);
9242 tcg_temp_free_i64(fp0
);
9246 check_cp1_registers(ctx
, fs
| fd
);
9248 TCGv_i64 fp0
= tcg_temp_new_i64();
9250 gen_load_fpr64(ctx
, fp0
, fs
);
9251 gen_helper_float_chs_d(fp0
, fp0
);
9252 gen_store_fpr64(ctx
, fp0
, fd
);
9253 tcg_temp_free_i64(fp0
);
9257 check_cp1_64bitmode(ctx
);
9259 TCGv_i64 fp0
= tcg_temp_new_i64();
9261 gen_load_fpr64(ctx
, fp0
, fs
);
9262 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
9263 gen_store_fpr64(ctx
, fp0
, fd
);
9264 tcg_temp_free_i64(fp0
);
9268 check_cp1_64bitmode(ctx
);
9270 TCGv_i64 fp0
= tcg_temp_new_i64();
9272 gen_load_fpr64(ctx
, fp0
, fs
);
9273 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
9274 gen_store_fpr64(ctx
, fp0
, fd
);
9275 tcg_temp_free_i64(fp0
);
9279 check_cp1_64bitmode(ctx
);
9281 TCGv_i64 fp0
= tcg_temp_new_i64();
9283 gen_load_fpr64(ctx
, fp0
, fs
);
9284 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
9285 gen_store_fpr64(ctx
, fp0
, fd
);
9286 tcg_temp_free_i64(fp0
);
9290 check_cp1_64bitmode(ctx
);
9292 TCGv_i64 fp0
= tcg_temp_new_i64();
9294 gen_load_fpr64(ctx
, fp0
, fs
);
9295 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
9296 gen_store_fpr64(ctx
, fp0
, fd
);
9297 tcg_temp_free_i64(fp0
);
9301 check_cp1_registers(ctx
, fs
);
9303 TCGv_i32 fp32
= tcg_temp_new_i32();
9304 TCGv_i64 fp64
= tcg_temp_new_i64();
9306 gen_load_fpr64(ctx
, fp64
, fs
);
9307 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
9308 tcg_temp_free_i64(fp64
);
9309 gen_store_fpr32(ctx
, fp32
, fd
);
9310 tcg_temp_free_i32(fp32
);
9314 check_cp1_registers(ctx
, fs
);
9316 TCGv_i32 fp32
= tcg_temp_new_i32();
9317 TCGv_i64 fp64
= tcg_temp_new_i64();
9319 gen_load_fpr64(ctx
, fp64
, fs
);
9320 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
9321 tcg_temp_free_i64(fp64
);
9322 gen_store_fpr32(ctx
, fp32
, fd
);
9323 tcg_temp_free_i32(fp32
);
9327 check_cp1_registers(ctx
, fs
);
9329 TCGv_i32 fp32
= tcg_temp_new_i32();
9330 TCGv_i64 fp64
= tcg_temp_new_i64();
9332 gen_load_fpr64(ctx
, fp64
, fs
);
9333 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
9334 tcg_temp_free_i64(fp64
);
9335 gen_store_fpr32(ctx
, fp32
, fd
);
9336 tcg_temp_free_i32(fp32
);
9340 check_cp1_registers(ctx
, fs
);
9342 TCGv_i32 fp32
= tcg_temp_new_i32();
9343 TCGv_i64 fp64
= tcg_temp_new_i64();
9345 gen_load_fpr64(ctx
, fp64
, fs
);
9346 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
9347 tcg_temp_free_i64(fp64
);
9348 gen_store_fpr32(ctx
, fp32
, fd
);
9349 tcg_temp_free_i32(fp32
);
9353 check_insn(ctx
, ISA_MIPS32R6
);
9354 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9357 check_insn(ctx
, ISA_MIPS32R6
);
9358 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9361 check_insn(ctx
, ISA_MIPS32R6
);
9362 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9365 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9366 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9369 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9371 TCGLabel
*l1
= gen_new_label();
9375 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9377 fp0
= tcg_temp_new_i64();
9378 gen_load_fpr64(ctx
, fp0
, fs
);
9379 gen_store_fpr64(ctx
, fp0
, fd
);
9380 tcg_temp_free_i64(fp0
);
9385 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9387 TCGLabel
*l1
= gen_new_label();
9391 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9392 fp0
= tcg_temp_new_i64();
9393 gen_load_fpr64(ctx
, fp0
, fs
);
9394 gen_store_fpr64(ctx
, fp0
, fd
);
9395 tcg_temp_free_i64(fp0
);
9401 check_cp1_registers(ctx
, fs
| fd
);
9403 TCGv_i64 fp0
= tcg_temp_new_i64();
9405 gen_load_fpr64(ctx
, fp0
, fs
);
9406 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9407 gen_store_fpr64(ctx
, fp0
, fd
);
9408 tcg_temp_free_i64(fp0
);
9412 check_cp1_registers(ctx
, fs
| fd
);
9414 TCGv_i64 fp0
= tcg_temp_new_i64();
9416 gen_load_fpr64(ctx
, fp0
, fs
);
9417 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9418 gen_store_fpr64(ctx
, fp0
, fd
);
9419 tcg_temp_free_i64(fp0
);
9423 check_insn(ctx
, ISA_MIPS32R6
);
9425 TCGv_i64 fp0
= tcg_temp_new_i64();
9426 TCGv_i64 fp1
= tcg_temp_new_i64();
9427 TCGv_i64 fp2
= tcg_temp_new_i64();
9428 gen_load_fpr64(ctx
, fp0
, fs
);
9429 gen_load_fpr64(ctx
, fp1
, ft
);
9430 gen_load_fpr64(ctx
, fp2
, fd
);
9431 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9432 gen_store_fpr64(ctx
, fp2
, fd
);
9433 tcg_temp_free_i64(fp2
);
9434 tcg_temp_free_i64(fp1
);
9435 tcg_temp_free_i64(fp0
);
9439 check_insn(ctx
, ISA_MIPS32R6
);
9441 TCGv_i64 fp0
= tcg_temp_new_i64();
9442 TCGv_i64 fp1
= tcg_temp_new_i64();
9443 TCGv_i64 fp2
= tcg_temp_new_i64();
9444 gen_load_fpr64(ctx
, fp0
, fs
);
9445 gen_load_fpr64(ctx
, fp1
, ft
);
9446 gen_load_fpr64(ctx
, fp2
, fd
);
9447 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9448 gen_store_fpr64(ctx
, fp2
, fd
);
9449 tcg_temp_free_i64(fp2
);
9450 tcg_temp_free_i64(fp1
);
9451 tcg_temp_free_i64(fp0
);
9455 check_insn(ctx
, ISA_MIPS32R6
);
9457 TCGv_i64 fp0
= tcg_temp_new_i64();
9458 gen_load_fpr64(ctx
, fp0
, fs
);
9459 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9460 gen_store_fpr64(ctx
, fp0
, fd
);
9461 tcg_temp_free_i64(fp0
);
9465 check_insn(ctx
, ISA_MIPS32R6
);
9467 TCGv_i64 fp0
= tcg_temp_new_i64();
9468 gen_load_fpr64(ctx
, fp0
, fs
);
9469 gen_helper_float_class_d(fp0
, fp0
);
9470 gen_store_fpr64(ctx
, fp0
, fd
);
9471 tcg_temp_free_i64(fp0
);
9474 case OPC_MIN_D
: /* OPC_RECIP2_D */
9475 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9477 TCGv_i64 fp0
= tcg_temp_new_i64();
9478 TCGv_i64 fp1
= tcg_temp_new_i64();
9479 gen_load_fpr64(ctx
, fp0
, fs
);
9480 gen_load_fpr64(ctx
, fp1
, ft
);
9481 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9482 gen_store_fpr64(ctx
, fp1
, fd
);
9483 tcg_temp_free_i64(fp1
);
9484 tcg_temp_free_i64(fp0
);
9487 check_cp1_64bitmode(ctx
);
9489 TCGv_i64 fp0
= tcg_temp_new_i64();
9490 TCGv_i64 fp1
= tcg_temp_new_i64();
9492 gen_load_fpr64(ctx
, fp0
, fs
);
9493 gen_load_fpr64(ctx
, fp1
, ft
);
9494 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9495 tcg_temp_free_i64(fp1
);
9496 gen_store_fpr64(ctx
, fp0
, fd
);
9497 tcg_temp_free_i64(fp0
);
9501 case OPC_MINA_D
: /* OPC_RECIP1_D */
9502 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9504 TCGv_i64 fp0
= tcg_temp_new_i64();
9505 TCGv_i64 fp1
= tcg_temp_new_i64();
9506 gen_load_fpr64(ctx
, fp0
, fs
);
9507 gen_load_fpr64(ctx
, fp1
, ft
);
9508 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9509 gen_store_fpr64(ctx
, fp1
, fd
);
9510 tcg_temp_free_i64(fp1
);
9511 tcg_temp_free_i64(fp0
);
9514 check_cp1_64bitmode(ctx
);
9516 TCGv_i64 fp0
= tcg_temp_new_i64();
9518 gen_load_fpr64(ctx
, fp0
, fs
);
9519 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9520 gen_store_fpr64(ctx
, fp0
, fd
);
9521 tcg_temp_free_i64(fp0
);
9525 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9526 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9528 TCGv_i64 fp0
= tcg_temp_new_i64();
9529 TCGv_i64 fp1
= tcg_temp_new_i64();
9530 gen_load_fpr64(ctx
, fp0
, fs
);
9531 gen_load_fpr64(ctx
, fp1
, ft
);
9532 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9533 gen_store_fpr64(ctx
, fp1
, fd
);
9534 tcg_temp_free_i64(fp1
);
9535 tcg_temp_free_i64(fp0
);
9538 check_cp1_64bitmode(ctx
);
9540 TCGv_i64 fp0
= tcg_temp_new_i64();
9542 gen_load_fpr64(ctx
, fp0
, fs
);
9543 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9544 gen_store_fpr64(ctx
, fp0
, fd
);
9545 tcg_temp_free_i64(fp0
);
9549 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9550 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9552 TCGv_i64 fp0
= tcg_temp_new_i64();
9553 TCGv_i64 fp1
= tcg_temp_new_i64();
9554 gen_load_fpr64(ctx
, fp0
, fs
);
9555 gen_load_fpr64(ctx
, fp1
, ft
);
9556 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9557 gen_store_fpr64(ctx
, fp1
, fd
);
9558 tcg_temp_free_i64(fp1
);
9559 tcg_temp_free_i64(fp0
);
9562 check_cp1_64bitmode(ctx
);
9564 TCGv_i64 fp0
= tcg_temp_new_i64();
9565 TCGv_i64 fp1
= tcg_temp_new_i64();
9567 gen_load_fpr64(ctx
, fp0
, fs
);
9568 gen_load_fpr64(ctx
, fp1
, ft
);
9569 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9570 tcg_temp_free_i64(fp1
);
9571 gen_store_fpr64(ctx
, fp0
, fd
);
9572 tcg_temp_free_i64(fp0
);
9585 case OPC_CMP_NGLE_D
:
9592 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9593 if (ctx
->opcode
& (1 << 6)) {
9594 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9596 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9600 check_cp1_registers(ctx
, fs
);
9602 TCGv_i32 fp32
= tcg_temp_new_i32();
9603 TCGv_i64 fp64
= tcg_temp_new_i64();
9605 gen_load_fpr64(ctx
, fp64
, fs
);
9606 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9607 tcg_temp_free_i64(fp64
);
9608 gen_store_fpr32(ctx
, fp32
, fd
);
9609 tcg_temp_free_i32(fp32
);
9613 check_cp1_registers(ctx
, fs
);
9615 TCGv_i32 fp32
= tcg_temp_new_i32();
9616 TCGv_i64 fp64
= tcg_temp_new_i64();
9618 gen_load_fpr64(ctx
, fp64
, fs
);
9619 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
9620 tcg_temp_free_i64(fp64
);
9621 gen_store_fpr32(ctx
, fp32
, fd
);
9622 tcg_temp_free_i32(fp32
);
9626 check_cp1_64bitmode(ctx
);
9628 TCGv_i64 fp0
= tcg_temp_new_i64();
9630 gen_load_fpr64(ctx
, fp0
, fs
);
9631 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
9632 gen_store_fpr64(ctx
, fp0
, fd
);
9633 tcg_temp_free_i64(fp0
);
9638 TCGv_i32 fp0
= tcg_temp_new_i32();
9640 gen_load_fpr32(ctx
, fp0
, fs
);
9641 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9642 gen_store_fpr32(ctx
, fp0
, fd
);
9643 tcg_temp_free_i32(fp0
);
9647 check_cp1_registers(ctx
, fd
);
9649 TCGv_i32 fp32
= tcg_temp_new_i32();
9650 TCGv_i64 fp64
= tcg_temp_new_i64();
9652 gen_load_fpr32(ctx
, fp32
, fs
);
9653 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9654 tcg_temp_free_i32(fp32
);
9655 gen_store_fpr64(ctx
, fp64
, fd
);
9656 tcg_temp_free_i64(fp64
);
9660 check_cp1_64bitmode(ctx
);
9662 TCGv_i32 fp32
= tcg_temp_new_i32();
9663 TCGv_i64 fp64
= tcg_temp_new_i64();
9665 gen_load_fpr64(ctx
, fp64
, fs
);
9666 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9667 tcg_temp_free_i64(fp64
);
9668 gen_store_fpr32(ctx
, fp32
, fd
);
9669 tcg_temp_free_i32(fp32
);
9673 check_cp1_64bitmode(ctx
);
9675 TCGv_i64 fp0
= tcg_temp_new_i64();
9677 gen_load_fpr64(ctx
, fp0
, fs
);
9678 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9679 gen_store_fpr64(ctx
, fp0
, fd
);
9680 tcg_temp_free_i64(fp0
);
9686 TCGv_i64 fp0
= tcg_temp_new_i64();
9688 gen_load_fpr64(ctx
, fp0
, fs
);
9689 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9690 gen_store_fpr64(ctx
, fp0
, fd
);
9691 tcg_temp_free_i64(fp0
);
9697 TCGv_i64 fp0
= tcg_temp_new_i64();
9698 TCGv_i64 fp1
= tcg_temp_new_i64();
9700 gen_load_fpr64(ctx
, fp0
, fs
);
9701 gen_load_fpr64(ctx
, fp1
, ft
);
9702 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9703 tcg_temp_free_i64(fp1
);
9704 gen_store_fpr64(ctx
, fp0
, fd
);
9705 tcg_temp_free_i64(fp0
);
9711 TCGv_i64 fp0
= tcg_temp_new_i64();
9712 TCGv_i64 fp1
= tcg_temp_new_i64();
9714 gen_load_fpr64(ctx
, fp0
, fs
);
9715 gen_load_fpr64(ctx
, fp1
, ft
);
9716 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9717 tcg_temp_free_i64(fp1
);
9718 gen_store_fpr64(ctx
, fp0
, fd
);
9719 tcg_temp_free_i64(fp0
);
9725 TCGv_i64 fp0
= tcg_temp_new_i64();
9726 TCGv_i64 fp1
= tcg_temp_new_i64();
9728 gen_load_fpr64(ctx
, fp0
, fs
);
9729 gen_load_fpr64(ctx
, fp1
, ft
);
9730 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9731 tcg_temp_free_i64(fp1
);
9732 gen_store_fpr64(ctx
, fp0
, fd
);
9733 tcg_temp_free_i64(fp0
);
9739 TCGv_i64 fp0
= tcg_temp_new_i64();
9741 gen_load_fpr64(ctx
, fp0
, fs
);
9742 gen_helper_float_abs_ps(fp0
, fp0
);
9743 gen_store_fpr64(ctx
, fp0
, fd
);
9744 tcg_temp_free_i64(fp0
);
9750 TCGv_i64 fp0
= tcg_temp_new_i64();
9752 gen_load_fpr64(ctx
, fp0
, fs
);
9753 gen_store_fpr64(ctx
, fp0
, fd
);
9754 tcg_temp_free_i64(fp0
);
9760 TCGv_i64 fp0
= tcg_temp_new_i64();
9762 gen_load_fpr64(ctx
, fp0
, fs
);
9763 gen_helper_float_chs_ps(fp0
, fp0
);
9764 gen_store_fpr64(ctx
, fp0
, fd
);
9765 tcg_temp_free_i64(fp0
);
9770 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9775 TCGLabel
*l1
= gen_new_label();
9779 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9780 fp0
= tcg_temp_new_i64();
9781 gen_load_fpr64(ctx
, fp0
, fs
);
9782 gen_store_fpr64(ctx
, fp0
, fd
);
9783 tcg_temp_free_i64(fp0
);
9790 TCGLabel
*l1
= gen_new_label();
9794 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9795 fp0
= tcg_temp_new_i64();
9796 gen_load_fpr64(ctx
, fp0
, fs
);
9797 gen_store_fpr64(ctx
, fp0
, fd
);
9798 tcg_temp_free_i64(fp0
);
9806 TCGv_i64 fp0
= tcg_temp_new_i64();
9807 TCGv_i64 fp1
= tcg_temp_new_i64();
9809 gen_load_fpr64(ctx
, fp0
, ft
);
9810 gen_load_fpr64(ctx
, fp1
, fs
);
9811 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
9812 tcg_temp_free_i64(fp1
);
9813 gen_store_fpr64(ctx
, fp0
, fd
);
9814 tcg_temp_free_i64(fp0
);
9820 TCGv_i64 fp0
= tcg_temp_new_i64();
9821 TCGv_i64 fp1
= tcg_temp_new_i64();
9823 gen_load_fpr64(ctx
, fp0
, ft
);
9824 gen_load_fpr64(ctx
, fp1
, fs
);
9825 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
9826 tcg_temp_free_i64(fp1
);
9827 gen_store_fpr64(ctx
, fp0
, fd
);
9828 tcg_temp_free_i64(fp0
);
9834 TCGv_i64 fp0
= tcg_temp_new_i64();
9835 TCGv_i64 fp1
= tcg_temp_new_i64();
9837 gen_load_fpr64(ctx
, fp0
, fs
);
9838 gen_load_fpr64(ctx
, fp1
, ft
);
9839 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
9840 tcg_temp_free_i64(fp1
);
9841 gen_store_fpr64(ctx
, fp0
, fd
);
9842 tcg_temp_free_i64(fp0
);
9848 TCGv_i64 fp0
= tcg_temp_new_i64();
9850 gen_load_fpr64(ctx
, fp0
, fs
);
9851 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
9852 gen_store_fpr64(ctx
, fp0
, fd
);
9853 tcg_temp_free_i64(fp0
);
9859 TCGv_i64 fp0
= tcg_temp_new_i64();
9861 gen_load_fpr64(ctx
, fp0
, fs
);
9862 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
9863 gen_store_fpr64(ctx
, fp0
, fd
);
9864 tcg_temp_free_i64(fp0
);
9870 TCGv_i64 fp0
= tcg_temp_new_i64();
9871 TCGv_i64 fp1
= tcg_temp_new_i64();
9873 gen_load_fpr64(ctx
, fp0
, fs
);
9874 gen_load_fpr64(ctx
, fp1
, ft
);
9875 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
9876 tcg_temp_free_i64(fp1
);
9877 gen_store_fpr64(ctx
, fp0
, fd
);
9878 tcg_temp_free_i64(fp0
);
9882 check_cp1_64bitmode(ctx
);
9884 TCGv_i32 fp0
= tcg_temp_new_i32();
9886 gen_load_fpr32h(ctx
, fp0
, fs
);
9887 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
9888 gen_store_fpr32(ctx
, fp0
, fd
);
9889 tcg_temp_free_i32(fp0
);
9895 TCGv_i64 fp0
= tcg_temp_new_i64();
9897 gen_load_fpr64(ctx
, fp0
, fs
);
9898 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
9899 gen_store_fpr64(ctx
, fp0
, fd
);
9900 tcg_temp_free_i64(fp0
);
9904 check_cp1_64bitmode(ctx
);
9906 TCGv_i32 fp0
= tcg_temp_new_i32();
9908 gen_load_fpr32(ctx
, fp0
, fs
);
9909 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
9910 gen_store_fpr32(ctx
, fp0
, fd
);
9911 tcg_temp_free_i32(fp0
);
9917 TCGv_i32 fp0
= tcg_temp_new_i32();
9918 TCGv_i32 fp1
= tcg_temp_new_i32();
9920 gen_load_fpr32(ctx
, fp0
, fs
);
9921 gen_load_fpr32(ctx
, fp1
, ft
);
9922 gen_store_fpr32h(ctx
, fp0
, fd
);
9923 gen_store_fpr32(ctx
, fp1
, fd
);
9924 tcg_temp_free_i32(fp0
);
9925 tcg_temp_free_i32(fp1
);
9931 TCGv_i32 fp0
= tcg_temp_new_i32();
9932 TCGv_i32 fp1
= tcg_temp_new_i32();
9934 gen_load_fpr32(ctx
, fp0
, fs
);
9935 gen_load_fpr32h(ctx
, fp1
, ft
);
9936 gen_store_fpr32(ctx
, fp1
, fd
);
9937 gen_store_fpr32h(ctx
, fp0
, fd
);
9938 tcg_temp_free_i32(fp0
);
9939 tcg_temp_free_i32(fp1
);
9945 TCGv_i32 fp0
= tcg_temp_new_i32();
9946 TCGv_i32 fp1
= tcg_temp_new_i32();
9948 gen_load_fpr32h(ctx
, fp0
, fs
);
9949 gen_load_fpr32(ctx
, fp1
, ft
);
9950 gen_store_fpr32(ctx
, fp1
, fd
);
9951 gen_store_fpr32h(ctx
, fp0
, fd
);
9952 tcg_temp_free_i32(fp0
);
9953 tcg_temp_free_i32(fp1
);
9959 TCGv_i32 fp0
= tcg_temp_new_i32();
9960 TCGv_i32 fp1
= tcg_temp_new_i32();
9962 gen_load_fpr32h(ctx
, fp0
, fs
);
9963 gen_load_fpr32h(ctx
, fp1
, ft
);
9964 gen_store_fpr32(ctx
, fp1
, fd
);
9965 gen_store_fpr32h(ctx
, fp0
, fd
);
9966 tcg_temp_free_i32(fp0
);
9967 tcg_temp_free_i32(fp1
);
9973 case OPC_CMP_UEQ_PS
:
9974 case OPC_CMP_OLT_PS
:
9975 case OPC_CMP_ULT_PS
:
9976 case OPC_CMP_OLE_PS
:
9977 case OPC_CMP_ULE_PS
:
9979 case OPC_CMP_NGLE_PS
:
9980 case OPC_CMP_SEQ_PS
:
9981 case OPC_CMP_NGL_PS
:
9983 case OPC_CMP_NGE_PS
:
9985 case OPC_CMP_NGT_PS
:
9986 if (ctx
->opcode
& (1 << 6)) {
9987 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
9989 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
9993 MIPS_INVAL("farith");
9994 generate_exception_end(ctx
, EXCP_RI
);
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
    TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
            tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fs);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
10080 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10081 int fd
, int fr
, int fs
, int ft
)
10087 TCGv t0
= tcg_temp_local_new();
10088 TCGv_i32 fp
= tcg_temp_new_i32();
10089 TCGv_i32 fph
= tcg_temp_new_i32();
10090 TCGLabel
*l1
= gen_new_label();
10091 TCGLabel
*l2
= gen_new_label();
10093 gen_load_gpr(t0
, fr
);
10094 tcg_gen_andi_tl(t0
, t0
, 0x7);
10096 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10097 gen_load_fpr32(ctx
, fp
, fs
);
10098 gen_load_fpr32h(ctx
, fph
, fs
);
10099 gen_store_fpr32(ctx
, fp
, fd
);
10100 gen_store_fpr32h(ctx
, fph
, fd
);
10103 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10105 #ifdef TARGET_WORDS_BIGENDIAN
10106 gen_load_fpr32(ctx
, fp
, fs
);
10107 gen_load_fpr32h(ctx
, fph
, ft
);
10108 gen_store_fpr32h(ctx
, fp
, fd
);
10109 gen_store_fpr32(ctx
, fph
, fd
);
10111 gen_load_fpr32h(ctx
, fph
, fs
);
10112 gen_load_fpr32(ctx
, fp
, ft
);
10113 gen_store_fpr32(ctx
, fph
, fd
);
10114 gen_store_fpr32h(ctx
, fp
, fd
);
10117 tcg_temp_free_i32(fp
);
10118 tcg_temp_free_i32(fph
);
10124 TCGv_i32 fp0
= tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();

    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fr);
    gen_helper_float_madd_s(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp2, fd);
    tcg_temp_free_i32(fp2);

    /* MADD.D */
    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_madd_d(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    /* MADD.PS */
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_madd_ps(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    /* MSUB.S */
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();

    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fr);
    gen_helper_float_msub_s(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp2, fd);
    tcg_temp_free_i32(fp2);

    /* MSUB.D */
    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_msub_d(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    /* MSUB.PS */
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_msub_ps(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    /* NMADD.S */
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();

    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fr);
    gen_helper_float_nmadd_s(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp2, fd);
    tcg_temp_free_i32(fp2);

    /* NMADD.D */
    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmadd_d(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    /* NMADD.PS */
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmadd_ps(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    /* NMSUB.S */
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();

    gen_load_fpr32(ctx, fp0, fs);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fr);
    gen_helper_float_nmsub_s(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp2, fd);
    tcg_temp_free_i32(fp2);

    /* NMSUB.D */
    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmsub_d(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    /* NMSUB.PS */
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmsub_ps(fp2, cpu_env, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    MIPS_INVAL("flt3_arith");
    generate_exception_end(ctx, EXCP_RI);
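/* RDHWR: read one of the user-visible hardware registers (CPU number,
   SYNCI step size, cycle counter, counter resolution or UserLocal) into
   GPR rt; unsupported selectors raise a Reserved Instruction exception. */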
static void gen_rdhwr(DisasContext *ctx, int rt, int rd)

#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode.  */
    check_insn(ctx, ISA_MIPS32R2);
#endif
    t0 = tcg_temp_new();

    /* CPU number */
    gen_helper_rdhwr_cpunum(t0, cpu_env);
    gen_store_gpr(t0, rt);

    /* SYNCI step size */
    gen_helper_rdhwr_synci_step(t0, cpu_env);
    gen_store_gpr(t0, rt);

    /* Cycle counter */
    gen_helper_rdhwr_cc(t0, cpu_env);
    gen_store_gpr(t0, rt);

    /* Counter resolution */
    gen_helper_rdhwr_ccres(t0, cpu_env);
    gen_store_gpr(t0, rt);

    /* UserLocal */
#if defined(CONFIG_USER_ONLY)
    tcg_gen_ld_tl(t0, cpu_env,
                  offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
    gen_store_gpr(t0, rt);
#else
    if ((ctx->hflags & MIPS_HFLAG_CP0) ||
        (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
    } else {
        generate_exception_end(ctx, EXCP_RI);
    }
#endif

    default: /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
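/* Clear the pending-branch flags both in the translation-time copy of
   hflags and, when needed, in the runtime hflags register. */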
static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
    } else {
        /* It is not safe to save ctx->hflags, as hflags may be changed
           at execution time by the instruction in the delay / forbidden
           slot.  */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}
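/* Complete a branch whose delay (or forbidden) slot has been translated:
   direct targets use goto_tb, conditional branches test bcond, and
   register branches update cpu_PC, switching ISA mode for MIPS16 /
   microMIPS targets. */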
static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io.  */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->pc + insn_bytes);
            break;
        case MIPS_HFLAG_B:
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* branch likely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            {
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_exit_tb(0);
            break;
        default:
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
/* Compact Branches */
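/* Compact branches have no delay slot: the unconditional forms are
   resolved immediately, while the conditional forms only mark the next
   instruction as a forbidden slot. */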
10467 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10468 int rs
, int rt
, int32_t offset
)
10470 int bcond_compute
= 0;
10471 TCGv t0
= tcg_temp_new();
10472 TCGv t1
= tcg_temp_new();
10473 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10475 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10476 #ifdef MIPS_DEBUG_DISAS
10477 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10480 generate_exception_end(ctx
, EXCP_RI
);
10484 /* Load needed operands and calculate btarget */
10486 /* compact branch */
10487 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10488 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10489 gen_load_gpr(t0
, rs
);
10490 gen_load_gpr(t1
, rt
);
10492 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10493 if (rs
<= rt
&& rs
== 0) {
10494 /* OPC_BEQZALC, OPC_BNEZALC */
10495 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10498 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10499 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10500 gen_load_gpr(t0
, rs
);
10501 gen_load_gpr(t1
, rt
);
10503 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10505 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10506 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10507 if (rs
== 0 || rs
== rt
) {
10508 /* OPC_BLEZALC, OPC_BGEZALC */
10509 /* OPC_BGTZALC, OPC_BLTZALC */
10510 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10512 gen_load_gpr(t0
, rs
);
10513 gen_load_gpr(t1
, rt
);
10515 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10519 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10524 /* OPC_BEQZC, OPC_BNEZC */
10525 gen_load_gpr(t0
, rs
);
10527 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10529 /* OPC_JIC, OPC_JIALC */
10530 TCGv tbase
= tcg_temp_new();
10531 TCGv toffset
= tcg_temp_new();
10533 gen_load_gpr(tbase
, rt
);
10534 tcg_gen_movi_tl(toffset
, offset
);
10535 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10536 tcg_temp_free(tbase
);
10537 tcg_temp_free(toffset
);
10541 MIPS_INVAL("Compact branch/jump");
10542 generate_exception_end(ctx
, EXCP_RI
);
10546 if (bcond_compute
== 0) {
/* Unconditional compact branch */
10550 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10553 ctx
->hflags
|= MIPS_HFLAG_BR
;
10556 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10559 ctx
->hflags
|= MIPS_HFLAG_B
;
10562 MIPS_INVAL("Compact branch/jump");
10563 generate_exception_end(ctx
, EXCP_RI
);
10567 /* Generating branch here as compact branches don't have delay slot */
10568 gen_branch(ctx
, 4);
10570 /* Conditional compact branch */
10571 TCGLabel
*fs
= gen_new_label();
10572 save_cpu_state(ctx
, 0);
10575 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10576 if (rs
== 0 && rt
!= 0) {
10578 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10579 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10581 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10584 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10587 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10588 if (rs
== 0 && rt
!= 0) {
10590 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10591 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10593 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10596 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10599 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10600 if (rs
== 0 && rt
!= 0) {
10602 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10603 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10605 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10608 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10611 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10612 if (rs
== 0 && rt
!= 0) {
10614 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10615 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10617 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10620 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
10623 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10624 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10626 /* OPC_BOVC, OPC_BNVC */
10627 TCGv t2
= tcg_temp_new();
10628 TCGv t3
= tcg_temp_new();
10629 TCGv t4
= tcg_temp_new();
10630 TCGv input_overflow
= tcg_temp_new();
10632 gen_load_gpr(t0
, rs
);
10633 gen_load_gpr(t1
, rt
);
10634 tcg_gen_ext32s_tl(t2
, t0
);
10635 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
10636 tcg_gen_ext32s_tl(t3
, t1
);
10637 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
10638 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
10640 tcg_gen_add_tl(t4
, t2
, t3
);
10641 tcg_gen_ext32s_tl(t4
, t4
);
10642 tcg_gen_xor_tl(t2
, t2
, t3
);
10643 tcg_gen_xor_tl(t3
, t4
, t3
);
10644 tcg_gen_andc_tl(t2
, t3
, t2
);
10645 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
10646 tcg_gen_or_tl(t4
, t4
, input_overflow
);
10647 if (opc
== OPC_BOVC
) {
10649 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
10652 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
10654 tcg_temp_free(input_overflow
);
10658 } else if (rs
< rt
&& rs
== 0) {
10659 /* OPC_BEQZALC, OPC_BNEZALC */
10660 if (opc
== OPC_BEQZALC
) {
10662 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
10665 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
10668 /* OPC_BEQC, OPC_BNEC */
10669 if (opc
== OPC_BEQC
) {
10671 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
10674 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
10679 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
10682 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
10685 MIPS_INVAL("Compact conditional branch/jump");
10686 generate_exception_end(ctx
, EXCP_RI
);
10690 /* Generating branch here as compact branches don't have delay slot */
10691 gen_goto_tb(ctx
, 1, ctx
->btarget
);
10694 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
/* ISA extensions (ASEs) */

/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_JAL = 0x03,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LBU = 0x14,
    M16_OPC_LHU = 0x15,
    M16_OPC_LWPC = 0x16,
    M16_OPC_LWU = 0x17,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_RRR = 0x1c,
    M16_OPC_EXTEND = 0x1e,

/* I8 funct field */

/* RR funct field */

/* I64 funct field */
    I64_DADDIUPC = 0x6,

/* RR ry field for CNVT */
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,

static int xlat (int r)

    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
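/* SAVE: spill the selected argument, static and return-address registers
   to the stack and then decrement $29 by the frame size. */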
static void gen_mips16_save (DisasContext *ctx,
                             int xsregs, int aregs,
                             int do_ra, int do_s0, int do_s1,
                             int framesize)

    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

        generate_exception_end(ctx, EXCP_RI);

    gen_base_offset_addr(ctx, t0, 29, 12);
    gen_load_gpr(t1, 7);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_base_offset_addr(ctx, t0, 29, 8);
    gen_load_gpr(t1, 6);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_base_offset_addr(ctx, t0, 29, 4);
    gen_load_gpr(t1, 5);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_base_offset_addr(ctx, t0, 29, 0);
    gen_load_gpr(t1, 4);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_load_gpr(t0, 29);

#define DECR_AND_STORE(reg) do {                                   \
        tcg_gen_movi_tl(t2, -4);                                   \
        gen_op_addr_add(ctx, t0, t0, t2);                          \
        gen_load_gpr(t1, reg);                                     \
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);         \
    } while (0)

    DECR_AND_STORE(31);
    DECR_AND_STORE(30);
    DECR_AND_STORE(23);
    DECR_AND_STORE(22);
    DECR_AND_STORE(21);
    DECR_AND_STORE(20);
    DECR_AND_STORE(19);
    DECR_AND_STORE(18);
    DECR_AND_STORE(17);
    DECR_AND_STORE(16);

        generate_exception_end(ctx, EXCP_RI);

#undef DECR_AND_STORE

    tcg_gen_movi_tl(t2, -framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
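/* RESTORE: reload the registers saved by SAVE and release the frame by
   incrementing $29 by the frame size. */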
static void gen_mips16_restore (DisasContext *ctx,
                                int xsregs, int aregs,
                                int do_ra, int do_s0, int do_s1,
                                int framesize)

    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, t0, cpu_gpr[29], t2);

#define DECR_AND_LOAD(reg) do {                                    \
        tcg_gen_movi_tl(t2, -4);                                   \
        gen_op_addr_add(ctx, t0, t0, t2);                          \
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);         \
        gen_store_gpr(t1, reg);                                    \
    } while (0)

        generate_exception_end(ctx, EXCP_RI);

#undef DECR_AND_LOAD

    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
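/* ADDIUPC: rx = PC-relative base + immediate.  The extended encoding is
   not valid in a branch delay slot. */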
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)

    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
    tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
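/* Decode the MIPS16e I64 funct field: 64-bit loads/stores and address
   arithmetic, only available on 64-bit CPUs. */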
11108 #if defined(TARGET_MIPS64)
11109 static void decode_i64_mips16 (DisasContext
*ctx
,
11110 int ry
, int funct
, int16_t offset
,
11115 check_insn(ctx
, ISA_MIPS3
);
11116 check_mips_64(ctx
);
11117 offset
= extended
? offset
: offset
<< 3;
11118 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11121 check_insn(ctx
, ISA_MIPS3
);
11122 check_mips_64(ctx
);
11123 offset
= extended
? offset
: offset
<< 3;
11124 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11127 check_insn(ctx
, ISA_MIPS3
);
11128 check_mips_64(ctx
);
11129 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11130 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11133 check_insn(ctx
, ISA_MIPS3
);
11134 check_mips_64(ctx
);
11135 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11136 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11139 check_insn(ctx
, ISA_MIPS3
);
11140 check_mips_64(ctx
);
11141 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11142 generate_exception_end(ctx
, EXCP_RI
);
11144 offset
= extended
? offset
: offset
<< 3;
11145 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11149 check_insn(ctx
, ISA_MIPS3
);
11150 check_mips_64(ctx
);
11151 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11152 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11155 check_insn(ctx
, ISA_MIPS3
);
11156 check_mips_64(ctx
);
11157 offset
= extended
? offset
: offset
<< 2;
11158 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11161 check_insn(ctx
, ISA_MIPS3
);
11162 check_mips_64(ctx
);
11163 offset
= extended
? offset
: offset
<< 2;
11164 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11170 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11172 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11173 int op
, rx
, ry
, funct
, sa
;
11174 int16_t imm
, offset
;
11176 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11177 op
= (ctx
->opcode
>> 11) & 0x1f;
11178 sa
= (ctx
->opcode
>> 22) & 0x1f;
11179 funct
= (ctx
->opcode
>> 8) & 0x7;
11180 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11181 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11182 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11183 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11184 | (ctx
->opcode
& 0x1f));
11186 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11189 case M16_OPC_ADDIUSP
:
11190 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11192 case M16_OPC_ADDIUPC
:
11193 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11196 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11197 /* No delay slot, so just process as a normal instruction */
11200 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11201 /* No delay slot, so just process as a normal instruction */
11203 case M16_OPC_BNEQZ
:
11204 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11205 /* No delay slot, so just process as a normal instruction */
11207 case M16_OPC_SHIFT
:
11208 switch (ctx
->opcode
& 0x3) {
11210 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11213 #if defined(TARGET_MIPS64)
11214 check_mips_64(ctx
);
11215 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11217 generate_exception_end(ctx
, EXCP_RI
);
11221 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11224 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11228 #if defined(TARGET_MIPS64)
11230 check_insn(ctx
, ISA_MIPS3
);
11231 check_mips_64(ctx
);
11232 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11236 imm
= ctx
->opcode
& 0xf;
11237 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11238 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11239 imm
= (int16_t) (imm
<< 1) >> 1;
11240 if ((ctx
->opcode
>> 4) & 0x1) {
11241 #if defined(TARGET_MIPS64)
11242 check_mips_64(ctx
);
11243 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11245 generate_exception_end(ctx
, EXCP_RI
);
11248 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11251 case M16_OPC_ADDIU8
:
11252 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11255 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11257 case M16_OPC_SLTIU
:
11258 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11263 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11266 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11269 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11272 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11275 check_insn(ctx
, ISA_MIPS32
);
11277 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11278 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11279 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11280 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11281 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11282 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11283 | (ctx
->opcode
& 0xf)) << 3;
11285 if (ctx
->opcode
& (1 << 7)) {
11286 gen_mips16_save(ctx
, xsregs
, aregs
,
11287 do_ra
, do_s0
, do_s1
,
11290 gen_mips16_restore(ctx
, xsregs
, aregs
,
11291 do_ra
, do_s0
, do_s1
,
11297 generate_exception_end(ctx
, EXCP_RI
);
11302 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11305 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11307 #if defined(TARGET_MIPS64)
11309 check_insn(ctx
, ISA_MIPS3
);
11310 check_mips_64(ctx
);
11311 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11315 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11318 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11321 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11324 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11327 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11330 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11333 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11335 #if defined(TARGET_MIPS64)
11337 check_insn(ctx
, ISA_MIPS3
);
11338 check_mips_64(ctx
);
11339 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11343 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11346 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11349 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11352 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11354 #if defined(TARGET_MIPS64)
11356 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11360 generate_exception_end(ctx
, EXCP_RI
);
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
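/* Top-level decoder for 16-bit MIPS16e instructions; EXTEND-prefixed
   encodings are handled by decode_extended_mips16_opc(). */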
11376 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11380 int op
, cnvt_op
, op1
, offset
;
11384 op
= (ctx
->opcode
>> 11) & 0x1f;
11385 sa
= (ctx
->opcode
>> 2) & 0x7;
11386 sa
= sa
== 0 ? 8 : sa
;
11387 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11388 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11389 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11390 op1
= offset
= ctx
->opcode
& 0x1f;
11395 case M16_OPC_ADDIUSP
:
11397 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11399 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11402 case M16_OPC_ADDIUPC
:
11403 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11406 offset
= (ctx
->opcode
& 0x7ff) << 1;
11407 offset
= (int16_t)(offset
<< 4) >> 4;
11408 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11409 /* No delay slot, so just process as a normal instruction */
11412 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11413 offset
= (((ctx
->opcode
& 0x1f) << 21)
11414 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11416 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11417 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11421 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11422 ((int8_t)ctx
->opcode
) << 1, 0);
11423 /* No delay slot, so just process as a normal instruction */
11425 case M16_OPC_BNEQZ
:
11426 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11427 ((int8_t)ctx
->opcode
) << 1, 0);
11428 /* No delay slot, so just process as a normal instruction */
11430 case M16_OPC_SHIFT
:
11431 switch (ctx
->opcode
& 0x3) {
11433 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11436 #if defined(TARGET_MIPS64)
11437 check_insn(ctx
, ISA_MIPS3
);
11438 check_mips_64(ctx
);
11439 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11441 generate_exception_end(ctx
, EXCP_RI
);
11445 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11448 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11452 #if defined(TARGET_MIPS64)
11454 check_insn(ctx
, ISA_MIPS3
);
11455 check_mips_64(ctx
);
11456 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11461 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11463 if ((ctx
->opcode
>> 4) & 1) {
11464 #if defined(TARGET_MIPS64)
11465 check_insn(ctx
, ISA_MIPS3
);
11466 check_mips_64(ctx
);
11467 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11469 generate_exception_end(ctx
, EXCP_RI
);
11472 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11476 case M16_OPC_ADDIU8
:
11478 int16_t imm
= (int8_t) ctx
->opcode
;
11480 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11485 int16_t imm
= (uint8_t) ctx
->opcode
;
11486 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11489 case M16_OPC_SLTIU
:
11491 int16_t imm
= (uint8_t) ctx
->opcode
;
11492 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11499 funct
= (ctx
->opcode
>> 8) & 0x7;
11502 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11503 ((int8_t)ctx
->opcode
) << 1, 0);
11506 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11507 ((int8_t)ctx
->opcode
) << 1, 0);
11510 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11513 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11514 ((int8_t)ctx
->opcode
) << 3);
11517 check_insn(ctx
, ISA_MIPS32
);
11519 int do_ra
= ctx
->opcode
& (1 << 6);
11520 int do_s0
= ctx
->opcode
& (1 << 5);
11521 int do_s1
= ctx
->opcode
& (1 << 4);
11522 int framesize
= ctx
->opcode
& 0xf;
11524 if (framesize
== 0) {
11527 framesize
= framesize
<< 3;
11530 if (ctx
->opcode
& (1 << 7)) {
11531 gen_mips16_save(ctx
, 0, 0,
11532 do_ra
, do_s0
, do_s1
, framesize
);
11534 gen_mips16_restore(ctx
, 0, 0,
11535 do_ra
, do_s0
, do_s1
, framesize
);
11541 int rz
= xlat(ctx
->opcode
& 0x7);
11543 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11544 ((ctx
->opcode
>> 5) & 0x7);
11545 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11549 reg32
= ctx
->opcode
& 0x1f;
11550 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11553 generate_exception_end(ctx
, EXCP_RI
);
11560 int16_t imm
= (uint8_t) ctx
->opcode
;
11562 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11567 int16_t imm
= (uint8_t) ctx
->opcode
;
11568 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11571 #if defined(TARGET_MIPS64)
11573 check_insn(ctx
, ISA_MIPS3
);
11574 check_mips_64(ctx
);
11575 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11579 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11582 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11585 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11588 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11591 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11594 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11597 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11599 #if defined (TARGET_MIPS64)
11601 check_insn(ctx
, ISA_MIPS3
);
11602 check_mips_64(ctx
);
11603 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11607 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11610 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11613 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11616 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11620 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11623 switch (ctx
->opcode
& 0x3) {
11625 mips32_op
= OPC_ADDU
;
11628 mips32_op
= OPC_SUBU
;
11630 #if defined(TARGET_MIPS64)
11632 mips32_op
= OPC_DADDU
;
11633 check_insn(ctx
, ISA_MIPS3
);
11634 check_mips_64(ctx
);
11637 mips32_op
= OPC_DSUBU
;
11638 check_insn(ctx
, ISA_MIPS3
);
11639 check_mips_64(ctx
);
11643 generate_exception_end(ctx
, EXCP_RI
);
11647 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11656 int nd
= (ctx
->opcode
>> 7) & 0x1;
11657 int link
= (ctx
->opcode
>> 6) & 0x1;
11658 int ra
= (ctx
->opcode
>> 5) & 0x1;
11661 check_insn(ctx
, ISA_MIPS32
);
11670 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11675 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11676 gen_helper_do_semihosting(cpu_env
);
11678 /* XXX: not clear which exception should be raised
11679 * when in debug mode...
11681 check_insn(ctx
, ISA_MIPS32
);
11682 generate_exception_end(ctx
, EXCP_DBp
);
11686 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11689 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11692 generate_exception_end(ctx
, EXCP_BREAK
);
11695 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11698 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11701 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11703 #if defined (TARGET_MIPS64)
11705 check_insn(ctx
, ISA_MIPS3
);
11706 check_mips_64(ctx
);
11707 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11711 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11714 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11717 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11720 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11723 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
11726 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
11729 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
11732 check_insn(ctx
, ISA_MIPS32
);
11734 case RR_RY_CNVT_ZEB
:
11735 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11737 case RR_RY_CNVT_ZEH
:
11738 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11740 case RR_RY_CNVT_SEB
:
11741 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11743 case RR_RY_CNVT_SEH
:
11744 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11746 #if defined (TARGET_MIPS64)
11747 case RR_RY_CNVT_ZEW
:
11748 check_insn(ctx
, ISA_MIPS64
);
11749 check_mips_64(ctx
);
11750 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11752 case RR_RY_CNVT_SEW
:
11753 check_insn(ctx
, ISA_MIPS64
);
11754 check_mips_64(ctx
);
11755 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11759 generate_exception_end(ctx
, EXCP_RI
);
11764 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
11766 #if defined (TARGET_MIPS64)
11768 check_insn(ctx
, ISA_MIPS3
);
11769 check_mips_64(ctx
);
11770 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
11773 check_insn(ctx
, ISA_MIPS3
);
11774 check_mips_64(ctx
);
11775 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
11778 check_insn(ctx
, ISA_MIPS3
);
11779 check_mips_64(ctx
);
11780 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
11783 check_insn(ctx
, ISA_MIPS3
);
11784 check_mips_64(ctx
);
11785 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
11789 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
11792 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
11795 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
11798 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
11800 #if defined (TARGET_MIPS64)
11802 check_insn(ctx
, ISA_MIPS3
);
11803 check_mips_64(ctx
);
11804 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
11807 check_insn(ctx
, ISA_MIPS3
);
11808 check_mips_64(ctx
);
11809 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
11812 check_insn(ctx
, ISA_MIPS3
);
11813 check_mips_64(ctx
);
11814 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
11817 check_insn(ctx
, ISA_MIPS3
);
11818 check_mips_64(ctx
);
11819 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
11823 generate_exception_end(ctx
, EXCP_RI
);
11827 case M16_OPC_EXTEND
:
11828 decode_extended_mips16_opc(env
, ctx
);
11831 #if defined(TARGET_MIPS64)
11833 funct
= (ctx
->opcode
>> 8) & 0x7;
11834 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
11838 generate_exception_end(ctx
, EXCP_RI
);
/* microMIPS extension to MIPS32/MIPS64 */

/*
 * microMIPS32/microMIPS64 major opcodes
 *
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *      Table 6.2 microMIPS32 Encoding of Major Opcode Field
 *
 * 2. MIPS Architecture For Programmers Volume II-A:
 *      The MIPS64 Instruction Set (Revision 3.51)
 */

    POOL32S = 0x16,  /* MIPS64 */
    DADDIU32 = 0x17, /* MIPS64 */
    /* 0x29 is reserved */
    /* 0x31 is reserved */
    SD32 = 0x36,     /* MIPS64 */
    LD32 = 0x37,     /* MIPS64 */
    /* 0x39 is reserved */

/* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */

/* POOL32A encoding of minor opcode field */
    /* These opcodes are distinguished only by bits 9..6; those bits are
     * what are recorded below. */
    /* The following can be distinguished by their lower 6 bits. */

/* POOL32AXF encoding of minor opcode field extension */
/*
 * 1. MIPS Architecture for Programmers Volume II-B:
 *      The microMIPS32 Instruction Set (Revision 3.05)
 *
 *      Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
 *
 * 2. MIPS Architecture for Programmers Volume IV-e:
 *      The MIPS DSP Application-Specific Extension
 *        to the microMIPS32 Architecture (Revision 2.34)
 *
 *      Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
 */
    /* begin of microMIPS32 DSP */
    /* bits 13..12 for 0x01 */
    /* bits 13..12 for 0x2a */
    /* bits 13..12 for 0x32 */
    /* end of microMIPS32 DSP */
    /* bits 15..12 for 0x2c */
    /* bits 15..12 for 0x34 */
    /* bits 15..12 for 0x3c */
    JR = 0x0, /* alias */
    /* bits 15..12 for 0x05 */
    /* bits 15..12 for 0x0d */
    /* bits 15..12 for 0x15 */
    /* bits 15..12 for 0x1d */
    /* bits 15..12 for 0x2d */
    /* bits 15..12 for 0x35 */

/* POOL32B encoding of minor opcode field (bits 15..12) */

/* POOL32C encoding of minor opcode field (bits 15..12) */
    /* 0xa is reserved */
    /* 0x6 is reserved */

/* POOL32F encoding of minor opcode field (bits 5..0) */
    /* These are the bit 7..6 values */
    /* These are the bit 8..6 values */
    MOVZ_FMT_05 = 0x05,
    CABS_COND_FMT = 0x1c, /* MIPS3D */

/* POOL32Fxf encoding of minor opcode extension field */

/* POOL32I encoding of minor opcode field (bits 25..21) */
    /* These overlap and are distinguished by bit16 of the instruction */

/* POOL16A encoding of minor opcode field */

/* POOL16B encoding of minor opcode field */

/* POOL16C encoding of minor opcode field */

/* R6 POOL16C encoding of minor opcode field (bits 0..5) */

/* POOL16D encoding of minor opcode field */

/* POOL16E encoding of minor opcode field */
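/* mmreg/mmreg2 map the 3-bit register fields of 16-bit microMIPS
   encodings onto the architectural GPR numbers they designate. */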
static int mmreg (int r)

    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

/* Used for 16-bit store instructions.  */
static int mmreg2 (int r)

    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width)) >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))

static void gen_addiur1sp(DisasContext *ctx)

    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);

static void gen_addiur2(DisasContext *ctx)

    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);

static void gen_addiusp(DisasContext *ctx)

    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);

static void gen_addius5(DisasContext *ctx)

    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);

static void gen_andi16(DisasContext *ctx)

    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
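/* LWM/SWM (and LDM/SDM on MIPS64): load or store the register list
   encoded in "reglist" starting at base + offset, via a helper call. */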
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
        gen_helper_lwm(cpu_env, t0, t1, t2);
        gen_helper_swm(cpu_env, t0, t1, t2);
#ifdef TARGET_MIPS64
        gen_helper_ldm(cpu_env, t0, t1, t2);
        gen_helper_sdm(cpu_env, t0, t1, t2);
#endif

    tcg_temp_free_i32(t2);
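/* Decoder for the 16-bit POOL16C group: register logic operations,
   LWM/SWM, jumps and jump-and-links, MFHI/MFLO, BREAK and SDBBP. */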
12523 static void gen_pool16c_insn(DisasContext
*ctx
)
12525 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12526 int rs
= mmreg(ctx
->opcode
& 0x7);
12528 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12533 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12539 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12545 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12551 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12558 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12559 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12561 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12570 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12571 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12573 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12580 int reg
= ctx
->opcode
& 0x1f;
12582 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12588 int reg
= ctx
->opcode
& 0x1f;
12589 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12590 /* Let normal delay slot handling in our caller take us
12591 to the branch target. */
12596 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12597 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12601 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12602 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12606 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12610 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12613 generate_exception_end(ctx
, EXCP_BREAK
);
12616 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12617 gen_helper_do_semihosting(cpu_env
);
12619 /* XXX: not clear which exception should be raised
12620 * when in debug mode...
12622 check_insn(ctx
, ISA_MIPS32
);
12623 generate_exception_end(ctx
, EXCP_DBp
);
12626 case JRADDIUSP
+ 0:
12627 case JRADDIUSP
+ 1:
12629 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12630 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12631 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12632 /* Let normal delay slot handling in our caller take us
12633 to the branch target. */
12637 generate_exception_end(ctx
, EXCP_RI
);
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)

    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };

    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];

    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
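/* Decoder for the Release 6 encoding of POOL16C: logic operations,
   LWM/SWM, JRC/JALRC, MOVEP, BREAK and SDBBP. */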
12665 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12667 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12668 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12670 switch (ctx
->opcode
& 0xf) {
12672 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12675 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12679 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12680 int offset
= extract32(ctx
->opcode
, 4, 4);
12681 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12684 case R6_JRC16
: /* JRCADDIUSP */
12685 if ((ctx
->opcode
>> 4) & 1) {
12687 int imm
= extract32(ctx
->opcode
, 5, 5);
12688 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12689 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12692 int rs
= extract32(ctx
->opcode
, 5, 5);
12693 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12696 case MOVEP
... MOVEP_07
:
12697 case MOVEP_0C
... MOVEP_0F
:
12699 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12700 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12701 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12702 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12706 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12709 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12713 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12714 int offset
= extract32(ctx
->opcode
, 4, 4);
12715 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12718 case JALRC16
: /* BREAK16, SDBBP16 */
12719 switch (ctx
->opcode
& 0x3f) {
12721 case JALRC16
+ 0x20:
12723 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
12728 generate_exception(ctx
, EXCP_BREAK
);
12732 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
12733 gen_helper_do_semihosting(cpu_env
);
12735 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
12736 generate_exception(ctx
, EXCP_RI
);
12738 generate_exception(ctx
, EXCP_DBp
);
12745 generate_exception(ctx
, EXCP_RI
);
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)

    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    gen_load_gpr(t1, index);
    tcg_gen_shli_tl(t1, t1, 2);
    gen_op_addr_add(ctx, t0, t1, t0);

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);
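/* LWP/SWP (and LDP/SDP on MIPS64): load or store the even/odd register
   pair rd/rd+1; the encoding is reserved in a delay slot or when
   rd == 31. */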
static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
                           int base, int16_t offset)

    if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    /* LWP */
        generate_exception_end(ctx, EXCP_RI);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd + 1);
    /* SWP */
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd + 1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
#ifdef TARGET_MIPS64
    /* LDP */
        generate_exception_end(ctx, EXCP_RI);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd + 1);
    /* SDP */
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd + 1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
#endif
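/* Decoder for the POOL32Axf minor-opcode group: traps, CP0 moves,
   multiply/divide and accumulate, bit manipulation, RDHWR, jump-and-link
   variants, shadow-register moves, TLB operations and interrupt
   enable/disable. */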
12833 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
12835 int extension
= (ctx
->opcode
>> 6) & 0x3f;
12836 int minor
= (ctx
->opcode
>> 12) & 0xf;
12837 uint32_t mips32_op
;
12839 switch (extension
) {
12841 mips32_op
= OPC_TEQ
;
12844 mips32_op
= OPC_TGE
;
12847 mips32_op
= OPC_TGEU
;
12850 mips32_op
= OPC_TLT
;
12853 mips32_op
= OPC_TLTU
;
12856 mips32_op
= OPC_TNE
;
12858 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
12860 #ifndef CONFIG_USER_ONLY
12863 check_cp0_enabled(ctx
);
12865 /* Treat as NOP. */
12868 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
12872 check_cp0_enabled(ctx
);
12874 TCGv t0
= tcg_temp_new();
12876 gen_load_gpr(t0
, rt
);
12877 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
12883 switch (minor
& 3) {
12885 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12888 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12891 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12894 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12897 goto pool32axf_invalid
;
12901 switch (minor
& 3) {
12903 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12906 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12909 goto pool32axf_invalid
;
12915 check_insn(ctx
, ISA_MIPS32R6
);
12916 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
12919 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
12922 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
12925 mips32_op
= OPC_CLO
;
12928 mips32_op
= OPC_CLZ
;
12930 check_insn(ctx
, ISA_MIPS32
);
12931 gen_cl(ctx
, mips32_op
, rt
, rs
);
12934 gen_rdhwr(ctx
, rt
, rs
);
12937 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
12940 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12941 mips32_op
= OPC_MULT
;
12944 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12945 mips32_op
= OPC_MULTU
;
12948 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12949 mips32_op
= OPC_DIV
;
12952 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12953 mips32_op
= OPC_DIVU
;
12956 check_insn(ctx
, ISA_MIPS32
);
12957 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
12960 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12961 mips32_op
= OPC_MADD
;
12964 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12965 mips32_op
= OPC_MADDU
;
12968 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12969 mips32_op
= OPC_MSUB
;
12972 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12973 mips32_op
= OPC_MSUBU
;
12975 check_insn(ctx
, ISA_MIPS32
);
12976 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
12979 goto pool32axf_invalid
;
12990 generate_exception_err(ctx
, EXCP_CpU
, 2);
12993 goto pool32axf_invalid
;
12998 case JALR
: /* JALRC */
12999 case JALR_HB
: /* JALRC_HB */
13000 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13001 /* JALRC, JALRC_HB */
13002 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13004 /* JALR, JALR_HB */
13005 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13006 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13011 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13012 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13013 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13016 goto pool32axf_invalid
;
13022 check_cp0_enabled(ctx
);
13023 check_insn(ctx
, ISA_MIPS32R2
);
13024 gen_load_srsgpr(rs
, rt
);
13027 check_cp0_enabled(ctx
);
13028 check_insn(ctx
, ISA_MIPS32R2
);
13029 gen_store_srsgpr(rs
, rt
);
13032 goto pool32axf_invalid
;
13035 #ifndef CONFIG_USER_ONLY
13039 mips32_op
= OPC_TLBP
;
13042 mips32_op
= OPC_TLBR
;
13045 mips32_op
= OPC_TLBWI
;
13048 mips32_op
= OPC_TLBWR
;
13051 mips32_op
= OPC_TLBINV
;
13054 mips32_op
= OPC_TLBINVF
;
13057 mips32_op
= OPC_WAIT
;
13060 mips32_op
= OPC_DERET
;
13063 mips32_op
= OPC_ERET
;
13065 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13068 goto pool32axf_invalid
;
13074 check_cp0_enabled(ctx
);
13076 TCGv t0
= tcg_temp_new();
13078 save_cpu_state(ctx
, 1);
13079 gen_helper_di(t0
, cpu_env
);
13080 gen_store_gpr(t0
, rs
);
13081 /* Stop translation as we may have switched the execution mode */
13082 ctx
->bstate
= BS_STOP
;
13087 check_cp0_enabled(ctx
);
13089 TCGv t0
= tcg_temp_new();
13091 save_cpu_state(ctx
, 1);
13092 gen_helper_ei(t0
, cpu_env
);
13093 gen_store_gpr(t0
, rs
);
13094 /* Stop translation as we may have switched the execution mode */
13095 ctx
->bstate
= BS_STOP
;
13100 goto pool32axf_invalid
;
13110 generate_exception_end(ctx
, EXCP_SYSCALL
);
13113 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13114 gen_helper_do_semihosting(cpu_env
);
13116 check_insn(ctx
, ISA_MIPS32
);
13117 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13118 generate_exception_end(ctx
, EXCP_RI
);
13120 generate_exception_end(ctx
, EXCP_DBp
);
13125 goto pool32axf_invalid
;
13129 switch (minor
& 3) {
13131 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13134 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13137 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13140 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13143 goto pool32axf_invalid
;
13147 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13150 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13153 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13156 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13159 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13162 goto pool32axf_invalid
;
13167 MIPS_INVAL("pool32axf");
13168 generate_exception_end(ctx
, EXCP_RI
);
/* Values for microMIPS fmt field.  Variable-width, depending on which
   formats the instruction supports.  */
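/* Decoder for the POOL32Fxf group: FPU control/data moves, unary FP
   arithmetic, format conversions and conditional moves on FP condition
   codes. */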
13193 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13195 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13196 uint32_t mips32_op
;
13198 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13199 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13200 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13202 switch (extension
) {
13203 case FLOAT_1BIT_FMT(CFC1
, 0):
13204 mips32_op
= OPC_CFC1
;
13206 case FLOAT_1BIT_FMT(CTC1
, 0):
13207 mips32_op
= OPC_CTC1
;
13209 case FLOAT_1BIT_FMT(MFC1
, 0):
13210 mips32_op
= OPC_MFC1
;
13212 case FLOAT_1BIT_FMT(MTC1
, 0):
13213 mips32_op
= OPC_MTC1
;
13215 case FLOAT_1BIT_FMT(MFHC1
, 0):
13216 mips32_op
= OPC_MFHC1
;
13218 case FLOAT_1BIT_FMT(MTHC1
, 0):
13219 mips32_op
= OPC_MTHC1
;
13221 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13224 /* Reciprocal square root */
13225 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13226 mips32_op
= OPC_RSQRT_S
;
13228 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13229 mips32_op
= OPC_RSQRT_D
;
13233 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13234 mips32_op
= OPC_SQRT_S
;
13236 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13237 mips32_op
= OPC_SQRT_D
;
13241 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13242 mips32_op
= OPC_RECIP_S
;
13244 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13245 mips32_op
= OPC_RECIP_D
;
13249 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13250 mips32_op
= OPC_FLOOR_L_S
;
13252 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13253 mips32_op
= OPC_FLOOR_L_D
;
13255 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13256 mips32_op
= OPC_FLOOR_W_S
;
13258 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13259 mips32_op
= OPC_FLOOR_W_D
;
13263 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13264 mips32_op
= OPC_CEIL_L_S
;
13266 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13267 mips32_op
= OPC_CEIL_L_D
;
13269 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13270 mips32_op
= OPC_CEIL_W_S
;
13272 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13273 mips32_op
= OPC_CEIL_W_D
;
13277 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13278 mips32_op
= OPC_TRUNC_L_S
;
13280 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13281 mips32_op
= OPC_TRUNC_L_D
;
13283 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13284 mips32_op
= OPC_TRUNC_W_S
;
13286 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13287 mips32_op
= OPC_TRUNC_W_D
;
13291 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13292 mips32_op
= OPC_ROUND_L_S
;
13294 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13295 mips32_op
= OPC_ROUND_L_D
;
13297 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13298 mips32_op
= OPC_ROUND_W_S
;
13300 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13301 mips32_op
= OPC_ROUND_W_D
;
13304 /* Integer to floating-point conversion */
13305 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13306 mips32_op
= OPC_CVT_L_S
;
13308 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13309 mips32_op
= OPC_CVT_L_D
;
13311 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13312 mips32_op
= OPC_CVT_W_S
;
13314 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13315 mips32_op
= OPC_CVT_W_D
;
13318 /* Paired-foo conversions */
13319 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13320 mips32_op
= OPC_CVT_S_PL
;
13322 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13323 mips32_op
= OPC_CVT_S_PU
;
13325 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13326 mips32_op
= OPC_CVT_PW_PS
;
13328 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13329 mips32_op
= OPC_CVT_PS_PW
;
13332 /* Floating-point moves */
13333 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13334 mips32_op
= OPC_MOV_S
;
13336 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13337 mips32_op
= OPC_MOV_D
;
13339 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13340 mips32_op
= OPC_MOV_PS
;
13343 /* Absolute value */
13344 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13345 mips32_op
= OPC_ABS_S
;
13347 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13348 mips32_op
= OPC_ABS_D
;
13350 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13351 mips32_op
= OPC_ABS_PS
;
13355 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13356 mips32_op
= OPC_NEG_S
;
13358 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13359 mips32_op
= OPC_NEG_D
;
13361 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13362 mips32_op
= OPC_NEG_PS
;
13365 /* Reciprocal square root step */
13366 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13367 mips32_op
= OPC_RSQRT1_S
;
13369 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13370 mips32_op
= OPC_RSQRT1_D
;
13372 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13373 mips32_op
= OPC_RSQRT1_PS
;
13376 /* Reciprocal step */
13377 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13378 mips32_op
= OPC_RECIP1_S
;
13380 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13381 mips32_op
= OPC_RECIP1_S
;
13383 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13384 mips32_op
= OPC_RECIP1_PS
;
13387 /* Conversions from double */
13388 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13389 mips32_op
= OPC_CVT_D_S
;
13391 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13392 mips32_op
= OPC_CVT_D_W
;
13394 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13395 mips32_op
= OPC_CVT_D_L
;
13398 /* Conversions from single */
13399 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13400 mips32_op
= OPC_CVT_S_D
;
13402 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13403 mips32_op
= OPC_CVT_S_W
;
13405 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13406 mips32_op
= OPC_CVT_S_L
;
13408 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13411 /* Conditional moves on floating-point codes */
13412 case COND_FLOAT_MOV(MOVT
, 0):
13413 case COND_FLOAT_MOV(MOVT
, 1):
13414 case COND_FLOAT_MOV(MOVT
, 2):
13415 case COND_FLOAT_MOV(MOVT
, 3):
13416 case COND_FLOAT_MOV(MOVT
, 4):
13417 case COND_FLOAT_MOV(MOVT
, 5):
13418 case COND_FLOAT_MOV(MOVT
, 6):
13419 case COND_FLOAT_MOV(MOVT
, 7):
13420 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13421 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13423 case COND_FLOAT_MOV(MOVF
, 0):
13424 case COND_FLOAT_MOV(MOVF
, 1):
13425 case COND_FLOAT_MOV(MOVF
, 2):
13426 case COND_FLOAT_MOV(MOVF
, 3):
13427 case COND_FLOAT_MOV(MOVF
, 4):
13428 case COND_FLOAT_MOV(MOVF
, 5):
13429 case COND_FLOAT_MOV(MOVF
, 6):
13430 case COND_FLOAT_MOV(MOVF
, 7):
13431 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13432 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13435 MIPS_INVAL("pool32fxf");
13436 generate_exception_end(ctx
, EXCP_RI
);
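/* Decoder for 32-bit microMIPS instructions: fetch the second halfword,
   merge it into ctx->opcode and dispatch on the major opcode. */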
13441 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13445 int rt
, rs
, rd
, rr
;
13447 uint32_t op
, minor
, mips32_op
;
13448 uint32_t cond
, fmt
, cc
;
13450 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
13451 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13453 rt
= (ctx
->opcode
>> 21) & 0x1f;
13454 rs
= (ctx
->opcode
>> 16) & 0x1f;
13455 rd
= (ctx
->opcode
>> 11) & 0x1f;
13456 rr
= (ctx
->opcode
>> 6) & 0x1f;
13457 imm
= (int16_t) ctx
->opcode
;
13459 op
= (ctx
->opcode
>> 26) & 0x3f;
13462 minor
= ctx
->opcode
& 0x3f;
13465 minor
= (ctx
->opcode
>> 6) & 0xf;
13468 mips32_op
= OPC_SLL
;
13471 mips32_op
= OPC_SRA
;
13474 mips32_op
= OPC_SRL
;
13477 mips32_op
= OPC_ROTR
;
13479 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13482 check_insn(ctx
, ISA_MIPS32R6
);
13483 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13486 check_insn(ctx
, ISA_MIPS32R6
);
13487 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13490 goto pool32a_invalid
;
13494 minor
= (ctx
->opcode
>> 6) & 0xf;
13498 mips32_op
= OPC_ADD
;
13501 mips32_op
= OPC_ADDU
;
13504 mips32_op
= OPC_SUB
;
13507 mips32_op
= OPC_SUBU
;
13510 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13511 mips32_op
= OPC_MUL
;
13513 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13517 mips32_op
= OPC_SLLV
;
13520 mips32_op
= OPC_SRLV
;
13523 mips32_op
= OPC_SRAV
;
13526 mips32_op
= OPC_ROTRV
;
13528 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13530 /* Logical operations */
13532 mips32_op
= OPC_AND
;
13535 mips32_op
= OPC_OR
;
13538 mips32_op
= OPC_NOR
;
13541 mips32_op
= OPC_XOR
;
13543 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13545 /* Set less than */
13547 mips32_op
= OPC_SLT
;
13550 mips32_op
= OPC_SLTU
;
13552 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13555 goto pool32a_invalid
;
13559 minor
= (ctx
->opcode
>> 6) & 0xf;
13561 /* Conditional moves */
13562 case MOVN
: /* MUL */
13563 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13565 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13568 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13571 case MOVZ
: /* MUH */
13572 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13574 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13577 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13581 check_insn(ctx
, ISA_MIPS32R6
);
13582 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13585 check_insn(ctx
, ISA_MIPS32R6
);
13586 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13588 case LWXS
: /* DIV */
13589 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13591 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13594 gen_ldxs(ctx
, rs
, rt
, rd
);
13598 check_insn(ctx
, ISA_MIPS32R6
);
13599 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13602 check_insn(ctx
, ISA_MIPS32R6
);
13603 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13606 check_insn(ctx
, ISA_MIPS32R6
);
13607 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13610 goto pool32a_invalid
;
13614 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13617 check_insn(ctx
, ISA_MIPS32R6
);
13618 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13619 extract32(ctx
->opcode
, 9, 2));
13622 check_insn(ctx
, ISA_MIPS32R6
);
13623 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13624 extract32(ctx
->opcode
, 9, 2));
13627 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
        gen_pool32axf(env, ctx, rt, rs);
        generate_exception_end(ctx, EXCP_BREAK);
    pool32a_invalid:
        MIPS_INVAL("pool32a");
        generate_exception_end(ctx, EXCP_RI);
        minor = (ctx->opcode >> 12) & 0xf;
        check_cp0_enabled(ctx);
        /* Treat as no-op. */
        /* COP2: Not implemented. */
        generate_exception_err(ctx, EXCP_CpU, 2);
13654 #ifdef TARGET_MIPS64
13657 check_insn(ctx
, ISA_MIPS3
);
13658 check_mips_64(ctx
);
13663 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13665 #ifdef TARGET_MIPS64
13668 check_insn(ctx
, ISA_MIPS3
);
13669 check_mips_64(ctx
);
13674 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13677 MIPS_INVAL("pool32b");
13678 generate_exception_end(ctx
, EXCP_RI
);
13683 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
13684 minor
= ctx
->opcode
& 0x3f;
13685 check_cp1_enabled(ctx
);
13688 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13689 mips32_op
= OPC_ALNV_PS
;
13692 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13693 mips32_op
= OPC_MADD_S
;
13696 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13697 mips32_op
= OPC_MADD_D
;
13700 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13701 mips32_op
= OPC_MADD_PS
;
13704 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13705 mips32_op
= OPC_MSUB_S
;
13708 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13709 mips32_op
= OPC_MSUB_D
;
13712 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13713 mips32_op
= OPC_MSUB_PS
;
13716 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13717 mips32_op
= OPC_NMADD_S
;
13720 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13721 mips32_op
= OPC_NMADD_D
;
13724 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13725 mips32_op
= OPC_NMADD_PS
;
13728 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13729 mips32_op
= OPC_NMSUB_S
;
13732 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13733 mips32_op
= OPC_NMSUB_D
;
13736 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13737 mips32_op
= OPC_NMSUB_PS
;
13739 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
13741 case CABS_COND_FMT
:
13742 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13743 cond
= (ctx
->opcode
>> 6) & 0xf;
13744 cc
= (ctx
->opcode
>> 13) & 0x7;
13745 fmt
= (ctx
->opcode
>> 10) & 0x3;
13748 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
13751 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
13754 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
13757 goto pool32f_invalid
;
13761 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13762 cond
= (ctx
->opcode
>> 6) & 0xf;
13763 cc
= (ctx
->opcode
>> 13) & 0x7;
13764 fmt
= (ctx
->opcode
>> 10) & 0x3;
13767 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
13770 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
13773 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
13776 goto pool32f_invalid
;
13780 check_insn(ctx
, ISA_MIPS32R6
);
13781 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13784 check_insn(ctx
, ISA_MIPS32R6
);
13785 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13788 gen_pool32fxf(ctx
, rt
, rs
);
13792 switch ((ctx
->opcode
>> 6) & 0x7) {
13794 mips32_op
= OPC_PLL_PS
;
13797 mips32_op
= OPC_PLU_PS
;
13800 mips32_op
= OPC_PUL_PS
;
13803 mips32_op
= OPC_PUU_PS
;
13806 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13807 mips32_op
= OPC_CVT_PS_S
;
13809 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13812 goto pool32f_invalid
;
13816 check_insn(ctx
, ISA_MIPS32R6
);
13817 switch ((ctx
->opcode
>> 9) & 0x3) {
13819 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
13822 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
13825 goto pool32f_invalid
;
13830 switch ((ctx
->opcode
>> 6) & 0x7) {
13832 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13833 mips32_op
= OPC_LWXC1
;
13836 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13837 mips32_op
= OPC_SWXC1
;
13840 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13841 mips32_op
= OPC_LDXC1
;
13844 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13845 mips32_op
= OPC_SDXC1
;
13848 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13849 mips32_op
= OPC_LUXC1
;
13852 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13853 mips32_op
= OPC_SUXC1
;
13855 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
13858 goto pool32f_invalid
;
13862 check_insn(ctx
, ISA_MIPS32R6
);
13863 switch ((ctx
->opcode
>> 9) & 0x3) {
13865 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
13868 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
13871 goto pool32f_invalid
;
13876 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13877 fmt
= (ctx
->opcode
>> 9) & 0x3;
13878 switch ((ctx
->opcode
>> 6) & 0x7) {
13882 mips32_op
= OPC_RSQRT2_S
;
13885 mips32_op
= OPC_RSQRT2_D
;
13888 mips32_op
= OPC_RSQRT2_PS
;
13891 goto pool32f_invalid
;
13897 mips32_op
= OPC_RECIP2_S
;
13900 mips32_op
= OPC_RECIP2_D
;
13903 mips32_op
= OPC_RECIP2_PS
;
13906 goto pool32f_invalid
;
13910 mips32_op
= OPC_ADDR_PS
;
13913 mips32_op
= OPC_MULR_PS
;
13915 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13918 goto pool32f_invalid
;
13922 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
13923 cc
= (ctx
->opcode
>> 13) & 0x7;
13924 fmt
= (ctx
->opcode
>> 9) & 0x3;
13925 switch ((ctx
->opcode
>> 6) & 0x7) {
13926 case MOVF_FMT
: /* RINT_FMT */
13927 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13931 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
13934 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
13937 goto pool32f_invalid
;
13943 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
13946 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
13950 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
13953 goto pool32f_invalid
;
13957 case MOVT_FMT
: /* CLASS_FMT */
13958 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13962 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
13965 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
13968 goto pool32f_invalid
;
13974 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
13977 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
13981 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
13984 goto pool32f_invalid
;
13989 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13992 goto pool32f_invalid
;
#define FINSN_3ARG_SDPS(prfx)                           \
    switch ((ctx->opcode >> 8) & 0x3) {                 \
    case FMT_SDPS_S:                                    \
        mips32_op = OPC_##prfx##_S;                     \
        break;                                          \
    case FMT_SDPS_D:                                    \
        mips32_op = OPC_##prfx##_D;                     \
        break;                                          \
    case FMT_SDPS_PS:                                   \
        mips32_op = OPC_##prfx##_PS;                    \
        break;                                          \
    default:                                            \
        goto pool32f_invalid;                           \
    }
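/*
 * Illustrative sketch of the token-pasting trick used by FINSN_3ARG_SDPS
 * above, in miniature.  EX_OP_ADD_S/_D/_PS and the macro below are
 * hypothetical names, shown only to make the OPC_##prfx##_fmt splicing
 * concrete; they are not part of the translator.
 */
enum { EX_OP_ADD_S = 0, EX_OP_ADD_D = 1, EX_OP_ADD_PS = 2 };

#define EX_PICK_FMT(prfx, fmt_bits)                       \
    ((fmt_bits) == 0 ? EX_OP_##prfx##_S :                 \
     (fmt_bits) == 1 ? EX_OP_##prfx##_D : EX_OP_##prfx##_PS)

static int ex_pick_add_opcode(unsigned fmt_bits)
{
    /* FINSN_3ARG_SDPS(ADD) makes the same selection, done with a switch on
       (ctx->opcode >> 8) & 0x3 instead of a conditional chain. */
    return EX_PICK_FMT(ADD, fmt_bits);
}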
14011 check_insn(ctx
, ISA_MIPS32R6
);
14012 switch ((ctx
->opcode
>> 9) & 0x3) {
14014 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14017 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14020 goto pool32f_invalid
;
14024 check_insn(ctx
, ISA_MIPS32R6
);
14025 switch ((ctx
->opcode
>> 9) & 0x3) {
14027 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14030 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14033 goto pool32f_invalid
;
14037 /* regular FP ops */
14038 switch ((ctx
->opcode
>> 6) & 0x3) {
14040 FINSN_3ARG_SDPS(ADD
);
14043 FINSN_3ARG_SDPS(SUB
);
14046 FINSN_3ARG_SDPS(MUL
);
14049 fmt
= (ctx
->opcode
>> 8) & 0x3;
14051 mips32_op
= OPC_DIV_D
;
14052 } else if (fmt
== 0) {
14053 mips32_op
= OPC_DIV_S
;
14055 goto pool32f_invalid
;
14059 goto pool32f_invalid
;
14064 switch ((ctx
->opcode
>> 6) & 0x7) {
14065 case MOVN_FMT
: /* SELNEZ_FMT */
14066 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14068 switch ((ctx
->opcode
>> 9) & 0x3) {
14070 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14073 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14076 goto pool32f_invalid
;
14080 FINSN_3ARG_SDPS(MOVN
);
14084 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14085 FINSN_3ARG_SDPS(MOVN
);
14087 case MOVZ_FMT
: /* SELEQZ_FMT */
14088 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14090 switch ((ctx
->opcode
>> 9) & 0x3) {
14092 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14095 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14098 goto pool32f_invalid
;
14102 FINSN_3ARG_SDPS(MOVZ
);
14106 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14107 FINSN_3ARG_SDPS(MOVZ
);
14110 check_insn(ctx
, ISA_MIPS32R6
);
14111 switch ((ctx
->opcode
>> 9) & 0x3) {
14113 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14116 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14119 goto pool32f_invalid
;
14123 check_insn(ctx
, ISA_MIPS32R6
);
14124 switch ((ctx
->opcode
>> 9) & 0x3) {
14126 mips32_op
= OPC_MADDF_S
;
14129 mips32_op
= OPC_MADDF_D
;
14132 goto pool32f_invalid
;
14136 check_insn(ctx
, ISA_MIPS32R6
);
14137 switch ((ctx
->opcode
>> 9) & 0x3) {
14139 mips32_op
= OPC_MSUBF_S
;
14142 mips32_op
= OPC_MSUBF_D
;
14145 goto pool32f_invalid
;
14149 goto pool32f_invalid
;
14153 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14157 MIPS_INVAL("pool32f");
14158 generate_exception_end(ctx
, EXCP_RI
);
14162 generate_exception_err(ctx
, EXCP_CpU
, 1);
14166 minor
= (ctx
->opcode
>> 21) & 0x1f;
14169 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14170 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14173 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14174 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14175 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14178 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14179 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14180 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14183 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14184 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14187 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14188 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14189 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14192 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14193 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14194 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14197 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14198 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14201 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14202 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14206 case TLTI
: /* BC1EQZC */
14207 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14209 check_cp1_enabled(ctx
);
14210 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14213 mips32_op
= OPC_TLTI
;
14217 case TGEI
: /* BC1NEZC */
14218 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14220 check_cp1_enabled(ctx
);
14221 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14224 mips32_op
= OPC_TGEI
;
14229 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14230 mips32_op
= OPC_TLTIU
;
14233 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14234 mips32_op
= OPC_TGEIU
;
14236 case TNEI
: /* SYNCI */
14237 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
            /* Break the TB to be able to sync copied instructions. */
            ctx->bstate = BS_STOP;
14244 mips32_op
= OPC_TNEI
;
14249 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14250 mips32_op
= OPC_TEQI
;
14252 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14257 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
            gen_compute_branch(ctx, minor == BNEZC ? OPC_BNE : OPC_BEQ,
                               4, rs, 0, imm << 1, 0);
            /* Compact branches don't have a delay slot, so just let
               the normal delay slot handling take us to the branch target. */
14265 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14266 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14269 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
            /* Break the TB to be able to sync copied instructions. */
            ctx->bstate = BS_STOP;
14276 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14277 /* COP2: Not implemented. */
14278 generate_exception_err(ctx
, EXCP_CpU
, 2);
14281 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14282 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14285 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14286 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14289 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14290 mips32_op
= OPC_BC1FANY4
;
14293 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14294 mips32_op
= OPC_BC1TANY4
;
14297 check_insn(ctx
, ASE_MIPS3D
);
14300 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14301 check_cp1_enabled(ctx
);
14302 gen_compute_branch1(ctx
, mips32_op
,
14303 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14305 generate_exception_err(ctx
, EXCP_CpU
, 1);
14310 /* MIPS DSP: not implemented */
14313 MIPS_INVAL("pool32i");
14314 generate_exception_end(ctx
, EXCP_RI
);
        minor = (ctx->opcode >> 12) & 0xf;
        offset = sextract32(ctx->opcode, 0,
                            (ctx->insn_flags & ISA_MIPS32R6) ? 9 : 12);
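/*
 * Illustrative sketch: what sextract32(ctx->opcode, 0, len) yields for the
 * POOL32C offset above -- the low 9 bits (R6) or 12 bits (pre-R6) of the
 * opcode interpreted as a signed two's-complement field.  The helper below
 * is a hypothetical stand-in, not QEMU's sextract32().
 */
static int32_t ex_sign_extract(uint32_t value, unsigned start, unsigned length)
{
    /* Move the field to the top of the word, then arithmetic-shift it back
       down so the field's MSB becomes the sign bit. */
    return (int32_t)(value << (32 - start - length)) >> (32 - length);
}
/* e.g. ex_sign_extract(0xFFF, 0, 12) == -1, ex_sign_extract(0x0FF, 0, 12) == 255 */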
14324 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14325 mips32_op
= OPC_LWL
;
14328 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14329 mips32_op
= OPC_SWL
;
14332 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14333 mips32_op
= OPC_LWR
;
14336 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14337 mips32_op
= OPC_SWR
;
14339 #if defined(TARGET_MIPS64)
14341 check_insn(ctx
, ISA_MIPS3
);
14342 check_mips_64(ctx
);
14343 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14344 mips32_op
= OPC_LDL
;
14347 check_insn(ctx
, ISA_MIPS3
);
14348 check_mips_64(ctx
);
14349 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14350 mips32_op
= OPC_SDL
;
14353 check_insn(ctx
, ISA_MIPS3
);
14354 check_mips_64(ctx
);
14355 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14356 mips32_op
= OPC_LDR
;
14359 check_insn(ctx
, ISA_MIPS3
);
14360 check_mips_64(ctx
);
14361 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14362 mips32_op
= OPC_SDR
;
14365 check_insn(ctx
, ISA_MIPS3
);
14366 check_mips_64(ctx
);
14367 mips32_op
= OPC_LWU
;
14370 check_insn(ctx
, ISA_MIPS3
);
14371 check_mips_64(ctx
);
14372 mips32_op
= OPC_LLD
;
14376 mips32_op
= OPC_LL
;
14379 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14382 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14385 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14387 #if defined(TARGET_MIPS64)
14389 check_insn(ctx
, ISA_MIPS3
);
14390 check_mips_64(ctx
);
14391 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
        /* Treat as no-op */
        if ((ctx->insn_flags & ISA_MIPS32R6) && (rt >= 24)) {
            /* hint codes 24-31 are reserved and signal RI */
            generate_exception(ctx, EXCP_RI);
        MIPS_INVAL("pool32c");
        generate_exception_end(ctx, EXCP_RI);
14407 case ADDI32
: /* AUI, LUI */
14408 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14410 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14413 mips32_op
= OPC_ADDI
;
14418 mips32_op
= OPC_ADDIU
;
14420 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14423 /* Logical operations */
14425 mips32_op
= OPC_ORI
;
14428 mips32_op
= OPC_XORI
;
14431 mips32_op
= OPC_ANDI
;
14433 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14436 /* Set less than immediate */
14438 mips32_op
= OPC_SLTI
;
14441 mips32_op
= OPC_SLTIU
;
14443 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14446 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14447 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14448 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14449 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
    case JALS32: /* BOVC, BEQC, BEQZALC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rs >= rt) {
                mips32_op = OPC_BOVC;
            } else if (rs < rt && rs == 0) {
                mips32_op = OPC_BEQZALC;
            } else {
                mips32_op = OPC_BEQC;
            }
            gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
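/*
 * Illustrative sketch: the R6 disambiguation used above.  BOVC, BEQZALC and
 * BEQC share one major opcode and are told apart only by how the rs and rt
 * register numbers compare; the rs >= rt arm is an assumption taken from the
 * else-if chain above, and the enum values are hypothetical placeholders.
 */
enum ex_beq_group { EX_BOVC, EX_BEQZALC, EX_BEQC };

static enum ex_beq_group ex_classify_beq_group(int rs, int rt)
{
    if (rs >= rt) {
        return EX_BOVC;      /* overflow-checking compact branch */
    } else if (rs == 0) {    /* rs < rt and rs is the zero register */
        return EX_BEQZALC;   /* branch-and-link on rt == 0 */
    }
    return EX_BEQC;          /* ordinary compare-and-branch */
}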
14466 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14467 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14468 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14471 case BEQ32
: /* BC */
14472 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14474 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14475 sextract32(ctx
->opcode
<< 1, 0, 27));
14478 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14481 case BNE32
: /* BALC */
14482 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14484 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14485 sextract32(ctx
->opcode
<< 1, 0, 27));
14488 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14491 case J32
: /* BGTZC, BLTZC, BLTC */
14492 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14493 if (rs
== 0 && rt
!= 0) {
14495 mips32_op
= OPC_BGTZC
;
14496 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14498 mips32_op
= OPC_BLTZC
;
14501 mips32_op
= OPC_BLTC
;
14503 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14506 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14507 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14510 case JAL32
: /* BLEZC, BGEZC, BGEC */
14511 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14512 if (rs
== 0 && rt
!= 0) {
14514 mips32_op
= OPC_BLEZC
;
14515 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14517 mips32_op
= OPC_BGEZC
;
14520 mips32_op
= OPC_BGEC
;
14522 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14525 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14526 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14527 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14530 /* Floating point (COP1) */
14532 mips32_op
= OPC_LWC1
;
14535 mips32_op
= OPC_LDC1
;
14538 mips32_op
= OPC_SWC1
;
14541 mips32_op
= OPC_SDC1
;
14543 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14545 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14546 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14547 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14548 switch ((ctx
->opcode
>> 16) & 0x1f) {
14549 case ADDIUPC_00
... ADDIUPC_07
:
14550 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14553 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14556 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14558 case LWPC_08
... LWPC_0F
:
14559 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14562 generate_exception(ctx
, EXCP_RI
);
14567 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14568 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14570 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14573 case BNVC
: /* BNEC, BNEZALC */
14574 check_insn(ctx
, ISA_MIPS32R6
);
14577 mips32_op
= OPC_BNVC
;
14578 } else if (rs
< rt
&& rs
== 0) {
14580 mips32_op
= OPC_BNEZALC
;
14583 mips32_op
= OPC_BNEC
;
14585 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14587 case R6_BNEZC
: /* JIALC */
14588 check_insn(ctx
, ISA_MIPS32R6
);
14591 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14592 sextract32(ctx
->opcode
<< 1, 0, 22));
14595 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14598 case R6_BEQZC
: /* JIC */
14599 check_insn(ctx
, ISA_MIPS32R6
);
14602 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14603 sextract32(ctx
->opcode
<< 1, 0, 22));
14606 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14609 case BLEZALC
: /* BGEZALC, BGEUC */
14610 check_insn(ctx
, ISA_MIPS32R6
);
14611 if (rs
== 0 && rt
!= 0) {
14613 mips32_op
= OPC_BLEZALC
;
14614 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14616 mips32_op
= OPC_BGEZALC
;
14619 mips32_op
= OPC_BGEUC
;
14621 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14623 case BGTZALC
: /* BLTZALC, BLTUC */
14624 check_insn(ctx
, ISA_MIPS32R6
);
14625 if (rs
== 0 && rt
!= 0) {
14627 mips32_op
= OPC_BGTZALC
;
14628 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14630 mips32_op
= OPC_BLTZALC
;
14633 mips32_op
= OPC_BLTUC
;
14635 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14637 /* Loads and stores */
14639 mips32_op
= OPC_LB
;
14642 mips32_op
= OPC_LBU
;
14645 mips32_op
= OPC_LH
;
14648 mips32_op
= OPC_LHU
;
14651 mips32_op
= OPC_LW
;
14653 #ifdef TARGET_MIPS64
14655 check_insn(ctx
, ISA_MIPS3
);
14656 check_mips_64(ctx
);
14657 mips32_op
= OPC_LD
;
14660 check_insn(ctx
, ISA_MIPS3
);
14661 check_mips_64(ctx
);
14662 mips32_op
= OPC_SD
;
14666 mips32_op
= OPC_SB
;
14669 mips32_op
= OPC_SH
;
14672 mips32_op
= OPC_SW
;
14675 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14678 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14681 generate_exception_end(ctx
, EXCP_RI
);
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
            /* POOL32A, POOL32B, POOL32I, POOL32C */
            /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
            /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
            /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
            /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
            /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
            /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
            /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
            int rd = mmreg(uMIPS_RD(ctx->opcode));
            int rs1 = mmreg(uMIPS_RS1(ctx->opcode));
            int rs2 = mmreg(uMIPS_RS2(ctx->opcode));

            switch (ctx->opcode & 0x1) {
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* In Release 6 the register number location in
                 * the instruction encoding has changed. */
                gen_arith(ctx, opc, rs1, rd, rs2);
                gen_arith(ctx, opc, rd, rs1, rs2);
14758 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14759 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14760 int amount
= (ctx
->opcode
>> 1) & 0x7;
14762 amount
= amount
== 0 ? 8 : amount
;
14764 switch (ctx
->opcode
& 0x1) {
14773 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
14777 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14778 gen_pool16c_r6_insn(ctx
);
14780 gen_pool16c_insn(ctx
);
14785 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14786 int rb
= 28; /* GP */
14787 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
14789 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14793 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14794 if (ctx
->opcode
& 1) {
14795 generate_exception_end(ctx
, EXCP_RI
);
14798 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14799 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14800 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
14801 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14806 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14807 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14808 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14809 offset
= (offset
== 0xf ? -1 : offset
);
14811 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
14816 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14817 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14818 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14820 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
14825 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14826 int rb
= 29; /* SP */
14827 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14829 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14834 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14835 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14836 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14838 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14843 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14844 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14845 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14847 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
14852 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14853 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14854 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14856 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
14861 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14862 int rb
= 29; /* SP */
14863 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14865 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14870 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14871 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14872 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14874 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14879 int rd
= uMIPS_RD5(ctx
->opcode
);
14880 int rs
= uMIPS_RS5(ctx
->opcode
);
14882 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
14889 switch (ctx
->opcode
& 0x1) {
14899 switch (ctx
->opcode
& 0x1) {
14904 gen_addiur1sp(ctx
);
14908 case B16
: /* BC16 */
14909 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
14910 sextract32(ctx
->opcode
, 0, 10) << 1,
14911 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
14913 case BNEZ16
: /* BNEZC16 */
14914 case BEQZ16
: /* BEQZC16 */
14915 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
14916 mmreg(uMIPS_RD(ctx
->opcode
)),
14917 0, sextract32(ctx
->opcode
, 0, 7) << 1,
14918 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
            int reg = mmreg(uMIPS_RD(ctx->opcode));
            int imm = ZIMM(ctx->opcode, 0, 7);

            imm = (imm == 0x7f ? -1 : imm);
            tcg_gen_movi_tl(cpu_gpr[reg], imm);
        generate_exception_end(ctx, EXCP_RI);
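/*
 * Illustrative sketch: decoding the LI16 immediate handled above.  The 7-bit
 * field encodes 0..126 directly and reserves the all-ones pattern 0x7f for -1.
 * The helper name is hypothetical.
 */
static int ex_li16_imm(uint32_t opcode)
{
    int imm = opcode & 0x7f;           /* ZIMM(ctx->opcode, 0, 7) */
    return (imm == 0x7f) ? -1 : imm;
}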
14936 decode_micromips32_opc(env
, ctx
);
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)
/* MDMX extension to MIPS64 */

/* MIPSDSP functions. */
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
    t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
    }

    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
    gen_store_gpr(t0, rd);
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
    gen_store_gpr(t0, rd);
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t0, rd);
#if defined(TARGET_MIPS64)
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
    gen_store_gpr(t0, rd);
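/*
 * Illustrative sketch of the effective-address selection in gen_mipsdsp_ld()
 * above, on plain integer register values: when either register index is 0
 * (the always-zero register) the other register's value is used directly,
 * otherwise the two values are added.  Hypothetical helper, assuming the
 * leading base == 0 test reconstructed above.
 */
static uint64_t ex_dsp_ld_addr(int base, uint64_t base_val,
                               int offset, uint64_t offset_val)
{
    if (base == 0) {
        return offset_val;             /* gen_load_gpr(t0, offset) */
    } else if (offset == 0) {
        return base_val;               /* gen_load_gpr(t0, base) */
    }
    return base_val + offset_val;      /* gen_op_addr_add(...) */
}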
static void gen_mipsdsp_arith(DisasContext *ctx, uint32_t op1, uint32_t op2,
                              int ret, int v1, int v2)
        /* Treat as NOP. */
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    /* OPC_MULT_G_2E is equal to OPC_ADDUH_QB_DSP */
    case OPC_MULT_G_2E:
            gen_helper_adduh_qb(cpu_gpr[ret], v1_t, v2_t);
15016 case OPC_ADDUH_R_QB
:
15017 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15020 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15022 case OPC_ADDQH_R_PH
:
15023 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15026 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15028 case OPC_ADDQH_R_W
:
15029 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15032 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15034 case OPC_SUBUH_R_QB
:
15035 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15038 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15040 case OPC_SUBQH_R_PH
:
15041 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15044 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15046 case OPC_SUBQH_R_W
:
15047 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15051 case OPC_ABSQ_S_PH_DSP
:
15053 case OPC_ABSQ_S_QB
:
15055 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15057 case OPC_ABSQ_S_PH
:
15059 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15063 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15065 case OPC_PRECEQ_W_PHL
:
15067 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15068 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15070 case OPC_PRECEQ_W_PHR
:
15072 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15073 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15074 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15076 case OPC_PRECEQU_PH_QBL
:
15078 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15080 case OPC_PRECEQU_PH_QBR
:
15082 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15084 case OPC_PRECEQU_PH_QBLA
:
15086 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15088 case OPC_PRECEQU_PH_QBRA
:
15090 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15092 case OPC_PRECEU_PH_QBL
:
15094 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15096 case OPC_PRECEU_PH_QBR
:
15098 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15100 case OPC_PRECEU_PH_QBLA
:
15102 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15104 case OPC_PRECEU_PH_QBRA
:
15106 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15110 case OPC_ADDU_QB_DSP
:
15114 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15116 case OPC_ADDQ_S_PH
:
15118 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15122 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15126 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15128 case OPC_ADDU_S_QB
:
15130 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15134 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15136 case OPC_ADDU_S_PH
:
15138 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15142 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15144 case OPC_SUBQ_S_PH
:
15146 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15150 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15154 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15156 case OPC_SUBU_S_QB
:
15158 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15162 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15164 case OPC_SUBU_S_PH
:
15166 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15170 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15174 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15178 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15180 case OPC_RADDU_W_QB
:
15182 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15186 case OPC_CMPU_EQ_QB_DSP
:
15188 case OPC_PRECR_QB_PH
:
15190 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15192 case OPC_PRECRQ_QB_PH
:
15194 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15196 case OPC_PRECR_SRA_PH_W
:
15199 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15200 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15202 tcg_temp_free_i32(sa_t
);
15205 case OPC_PRECR_SRA_R_PH_W
:
15208 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15209 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15211 tcg_temp_free_i32(sa_t
);
15214 case OPC_PRECRQ_PH_W
:
15216 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15218 case OPC_PRECRQ_RS_PH_W
:
15220 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15222 case OPC_PRECRQU_S_QB_PH
:
15224 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15228 #ifdef TARGET_MIPS64
15229 case OPC_ABSQ_S_QH_DSP
:
15231 case OPC_PRECEQ_L_PWL
:
15233 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15235 case OPC_PRECEQ_L_PWR
:
15237 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15239 case OPC_PRECEQ_PW_QHL
:
15241 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15243 case OPC_PRECEQ_PW_QHR
:
15245 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15247 case OPC_PRECEQ_PW_QHLA
:
15249 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15251 case OPC_PRECEQ_PW_QHRA
:
15253 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15255 case OPC_PRECEQU_QH_OBL
:
15257 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15259 case OPC_PRECEQU_QH_OBR
:
15261 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15263 case OPC_PRECEQU_QH_OBLA
:
15265 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15267 case OPC_PRECEQU_QH_OBRA
:
15269 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15271 case OPC_PRECEU_QH_OBL
:
15273 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15275 case OPC_PRECEU_QH_OBR
:
15277 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15279 case OPC_PRECEU_QH_OBLA
:
15281 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15283 case OPC_PRECEU_QH_OBRA
:
15285 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15287 case OPC_ABSQ_S_OB
:
15289 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15291 case OPC_ABSQ_S_PW
:
15293 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15295 case OPC_ABSQ_S_QH
:
15297 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15301 case OPC_ADDU_OB_DSP
:
15303 case OPC_RADDU_L_OB
:
15305 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15309 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15311 case OPC_SUBQ_S_PW
:
15313 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15317 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15319 case OPC_SUBQ_S_QH
:
15321 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15325 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15327 case OPC_SUBU_S_OB
:
15329 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15333 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15335 case OPC_SUBU_S_QH
:
15337 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15341 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15343 case OPC_SUBUH_R_OB
:
15345 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15349 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15351 case OPC_ADDQ_S_PW
:
15353 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15357 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15359 case OPC_ADDQ_S_QH
:
15361 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15365 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15367 case OPC_ADDU_S_OB
:
15369 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15373 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15375 case OPC_ADDU_S_QH
:
15377 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15381 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15383 case OPC_ADDUH_R_OB
:
15385 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15389 case OPC_CMPU_EQ_OB_DSP
:
15391 case OPC_PRECR_OB_QH
:
15393 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15395 case OPC_PRECR_SRA_QH_PW
:
15398 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15399 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15400 tcg_temp_free_i32(ret_t
);
15403 case OPC_PRECR_SRA_R_QH_PW
:
15406 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15407 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15408 tcg_temp_free_i32(sa_v
);
15411 case OPC_PRECRQ_OB_QH
:
15413 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15415 case OPC_PRECRQ_PW_L
:
15417 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15419 case OPC_PRECRQ_QH_PW
:
15421 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15423 case OPC_PRECRQ_RS_QH_PW
:
15425 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15427 case OPC_PRECRQU_S_OB_QH
:
15429 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15436 tcg_temp_free(v1_t
);
15437 tcg_temp_free(v2_t
);
15440 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
15441 int ret
, int v1
, int v2
)
15449 /* Treat as NOP. */
15453 t0
= tcg_temp_new();
15454 v1_t
= tcg_temp_new();
15455 v2_t
= tcg_temp_new();
15457 tcg_gen_movi_tl(t0
, v1
);
15458 gen_load_gpr(v1_t
, v1
);
15459 gen_load_gpr(v2_t
, v2
);
15462 case OPC_SHLL_QB_DSP
:
15464 op2
= MASK_SHLL_QB(ctx
->opcode
);
15468 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15472 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15476 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15480 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15482 case OPC_SHLL_S_PH
:
15484 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15486 case OPC_SHLLV_S_PH
:
15488 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15492 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15494 case OPC_SHLLV_S_W
:
15496 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15500 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15504 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15508 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15512 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15516 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15518 case OPC_SHRA_R_QB
:
15520 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15524 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15526 case OPC_SHRAV_R_QB
:
15528 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15532 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15534 case OPC_SHRA_R_PH
:
15536 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15540 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15542 case OPC_SHRAV_R_PH
:
15544 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15548 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15550 case OPC_SHRAV_R_W
:
15552 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15554 default: /* Invalid */
15555 MIPS_INVAL("MASK SHLL.QB");
15556 generate_exception_end(ctx
, EXCP_RI
);
15561 #ifdef TARGET_MIPS64
15562 case OPC_SHLL_OB_DSP
:
15563 op2
= MASK_SHLL_OB(ctx
->opcode
);
15567 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15571 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15573 case OPC_SHLL_S_PW
:
15575 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15577 case OPC_SHLLV_S_PW
:
15579 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15583 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15587 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15591 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15595 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15597 case OPC_SHLL_S_QH
:
15599 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15601 case OPC_SHLLV_S_QH
:
15603 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15607 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15611 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15613 case OPC_SHRA_R_OB
:
15615 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15617 case OPC_SHRAV_R_OB
:
15619 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15623 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15627 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15629 case OPC_SHRA_R_PW
:
15631 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15633 case OPC_SHRAV_R_PW
:
15635 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15639 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15643 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15645 case OPC_SHRA_R_QH
:
15647 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15649 case OPC_SHRAV_R_QH
:
15651 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15655 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15659 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15663 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15667 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15669 default: /* Invalid */
15670 MIPS_INVAL("MASK SHLL.OB");
15671 generate_exception_end(ctx
, EXCP_RI
);
15679 tcg_temp_free(v1_t
);
15680 tcg_temp_free(v2_t
);
15683 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15684 int ret
, int v1
, int v2
, int check_ret
)
15690 if ((ret
== 0) && (check_ret
== 1)) {
15691 /* Treat as NOP. */
15695 t0
= tcg_temp_new_i32();
15696 v1_t
= tcg_temp_new();
15697 v2_t
= tcg_temp_new();
15699 tcg_gen_movi_i32(t0
, ret
);
15700 gen_load_gpr(v1_t
, v1
);
15701 gen_load_gpr(v2_t
, v2
);
15704 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
15705 * the same mask and op1. */
15706 case OPC_MULT_G_2E
:
15710 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15713 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15716 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15718 case OPC_MULQ_RS_W
:
15719 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15723 case OPC_DPA_W_PH_DSP
:
15725 case OPC_DPAU_H_QBL
:
15727 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15729 case OPC_DPAU_H_QBR
:
15731 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15733 case OPC_DPSU_H_QBL
:
15735 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15737 case OPC_DPSU_H_QBR
:
15739 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15743 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15745 case OPC_DPAX_W_PH
:
15747 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15749 case OPC_DPAQ_S_W_PH
:
15751 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15753 case OPC_DPAQX_S_W_PH
:
15755 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15757 case OPC_DPAQX_SA_W_PH
:
15759 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15763 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15765 case OPC_DPSX_W_PH
:
15767 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15769 case OPC_DPSQ_S_W_PH
:
15771 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15773 case OPC_DPSQX_S_W_PH
:
15775 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15777 case OPC_DPSQX_SA_W_PH
:
15779 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15781 case OPC_MULSAQ_S_W_PH
:
15783 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15785 case OPC_DPAQ_SA_L_W
:
15787 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15789 case OPC_DPSQ_SA_L_W
:
15791 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15793 case OPC_MAQ_S_W_PHL
:
15795 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15797 case OPC_MAQ_S_W_PHR
:
15799 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15801 case OPC_MAQ_SA_W_PHL
:
15803 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15805 case OPC_MAQ_SA_W_PHR
:
15807 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15809 case OPC_MULSA_W_PH
:
15811 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15815 #ifdef TARGET_MIPS64
15816 case OPC_DPAQ_W_QH_DSP
:
15818 int ac
= ret
& 0x03;
15819 tcg_gen_movi_i32(t0
, ac
);
15824 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
15828 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
15832 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
15836 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
15840 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15842 case OPC_DPAQ_S_W_QH
:
15844 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15846 case OPC_DPAQ_SA_L_PW
:
15848 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15850 case OPC_DPAU_H_OBL
:
15852 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15854 case OPC_DPAU_H_OBR
:
15856 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15860 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15862 case OPC_DPSQ_S_W_QH
:
15864 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15866 case OPC_DPSQ_SA_L_PW
:
15868 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15870 case OPC_DPSU_H_OBL
:
15872 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15874 case OPC_DPSU_H_OBR
:
15876 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15878 case OPC_MAQ_S_L_PWL
:
15880 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
15882 case OPC_MAQ_S_L_PWR
:
15884 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
15886 case OPC_MAQ_S_W_QHLL
:
15888 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15890 case OPC_MAQ_SA_W_QHLL
:
15892 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15894 case OPC_MAQ_S_W_QHLR
:
15896 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15898 case OPC_MAQ_SA_W_QHLR
:
15900 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15902 case OPC_MAQ_S_W_QHRL
:
15904 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15906 case OPC_MAQ_SA_W_QHRL
:
15908 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15910 case OPC_MAQ_S_W_QHRR
:
15912 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15914 case OPC_MAQ_SA_W_QHRR
:
15916 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15918 case OPC_MULSAQ_S_L_PW
:
15920 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15922 case OPC_MULSAQ_S_W_QH
:
15924 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15930 case OPC_ADDU_QB_DSP
:
15932 case OPC_MULEU_S_PH_QBL
:
15934 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15936 case OPC_MULEU_S_PH_QBR
:
15938 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15940 case OPC_MULQ_RS_PH
:
15942 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15944 case OPC_MULEQ_S_W_PHL
:
15946 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15948 case OPC_MULEQ_S_W_PHR
:
15950 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15952 case OPC_MULQ_S_PH
:
15954 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15958 #ifdef TARGET_MIPS64
15959 case OPC_ADDU_OB_DSP
:
15961 case OPC_MULEQ_S_PW_QHL
:
15963 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15965 case OPC_MULEQ_S_PW_QHR
:
15967 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15969 case OPC_MULEU_S_QH_OBL
:
15971 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15973 case OPC_MULEU_S_QH_OBR
:
15975 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15977 case OPC_MULQ_RS_QH
:
15979 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15986 tcg_temp_free_i32(t0
);
15987 tcg_temp_free(v1_t
);
15988 tcg_temp_free(v2_t
);
static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int val)
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    val_t = tcg_temp_new();
    gen_load_gpr(val_t, val);

    case OPC_ABSQ_S_PH_DSP:
            gen_helper_bitrev(cpu_gpr[ret], val_t);
            target_long result;
            imm = (ctx->opcode >> 16) & 0xFF;
            result = (uint32_t)imm << 24 |
                     (uint32_t)imm << 16 |
                     (uint32_t)imm << 8  |
                     (uint32_t)imm;
            result = (int32_t)result;
            tcg_gen_movi_tl(cpu_gpr[ret], result);
            tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
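/*
 * Illustrative sketch: the constant REPL.QB materialises above -- one
 * immediate byte copied into all four byte lanes and then treated as a
 * signed 32-bit value, which is also what the shift/or sequence for
 * REPLV.QB builds at run time.  Hypothetical helper name.
 */
static int32_t ex_repl_qb_value(uint8_t imm)
{
    uint32_t v = (uint32_t)imm << 24 | (uint32_t)imm << 16 |
                 (uint32_t)imm << 8  | (uint32_t)imm;
    return (int32_t)v;
}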
            imm = (ctx->opcode >> 16) & 0x03FF;
            imm = (int16_t)(imm << 6) >> 6;
            tcg_gen_movi_tl(cpu_gpr[ret], \
                            (target_long)((int32_t)imm << 16 | \
                            (uint16_t)imm));
            tcg_gen_ext16u_tl(cpu_gpr[ret], val_t);
            tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
            tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
            tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
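/*
 * Illustrative sketch: the (int16_t)(imm << 6) >> 6 idiom used above to
 * sign-extend the 10-bit REPL.PH immediate.  The field is shifted to the top
 * of a 16-bit value and arithmetic-shifted back, so bit 9 becomes the sign
 * bit and the result lies in -512..511.  Hypothetical helper name.
 */
static int ex_sext10(uint32_t raw_field)
{
    int imm = raw_field & 0x03ff;          /* 10-bit field from the opcode */
    return (int16_t)(imm << 6) >> 6;
}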
16055 #ifdef TARGET_MIPS64
16056 case OPC_ABSQ_S_QH_DSP
:
16063 imm
= (ctx
->opcode
>> 16) & 0xFF;
16064 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16065 temp
= (temp
<< 16) | temp
;
16066 temp
= (temp
<< 32) | temp
;
16067 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16075 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16076 imm
= (int16_t)(imm
<< 6) >> 6;
16077 temp
= ((target_long
)imm
<< 32) \
16078 | ((target_long
)imm
& 0xFFFFFFFF);
16079 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16087 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16088 imm
= (int16_t)(imm
<< 6) >> 6;
16090 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16091 ((uint64_t)(uint16_t)imm
<< 32) |
16092 ((uint64_t)(uint16_t)imm
<< 16) |
16093 (uint64_t)(uint16_t)imm
;
16094 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16099 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16100 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16101 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16102 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16103 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16104 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16105 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16109 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16110 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16111 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16115 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16116 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16117 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16118 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16119 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16126 tcg_temp_free(val_t
);
16129 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16130 uint32_t op1
, uint32_t op2
,
16131 int ret
, int v1
, int v2
, int check_ret
)
16137 if ((ret
== 0) && (check_ret
== 1)) {
16138 /* Treat as NOP. */
16142 t1
= tcg_temp_new();
16143 v1_t
= tcg_temp_new();
16144 v2_t
= tcg_temp_new();
16146 gen_load_gpr(v1_t
, v1
);
16147 gen_load_gpr(v2_t
, v2
);
16150 case OPC_CMPU_EQ_QB_DSP
:
16152 case OPC_CMPU_EQ_QB
:
16154 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16156 case OPC_CMPU_LT_QB
:
16158 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16160 case OPC_CMPU_LE_QB
:
16162 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16164 case OPC_CMPGU_EQ_QB
:
16166 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16168 case OPC_CMPGU_LT_QB
:
16170 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16172 case OPC_CMPGU_LE_QB
:
16174 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16176 case OPC_CMPGDU_EQ_QB
:
16178 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16179 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16180 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16181 tcg_gen_shli_tl(t1
, t1
, 24);
16182 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16184 case OPC_CMPGDU_LT_QB
:
16186 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16187 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16188 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16189 tcg_gen_shli_tl(t1
, t1
, 24);
16190 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16192 case OPC_CMPGDU_LE_QB
:
16194 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16195 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16196 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16197 tcg_gen_shli_tl(t1
, t1
, 24);
16198 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16200 case OPC_CMP_EQ_PH
:
16202 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16204 case OPC_CMP_LT_PH
:
16206 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16208 case OPC_CMP_LE_PH
:
16210 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16214 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16218 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16220 case OPC_PACKRL_PH
:
16222 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16226 #ifdef TARGET_MIPS64
16227 case OPC_CMPU_EQ_OB_DSP
:
16229 case OPC_CMP_EQ_PW
:
16231 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16233 case OPC_CMP_LT_PW
:
16235 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16237 case OPC_CMP_LE_PW
:
16239 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16241 case OPC_CMP_EQ_QH
:
16243 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16245 case OPC_CMP_LT_QH
:
16247 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16249 case OPC_CMP_LE_QH
:
16251 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16253 case OPC_CMPGDU_EQ_OB
:
16255 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16257 case OPC_CMPGDU_LT_OB
:
16259 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16261 case OPC_CMPGDU_LE_OB
:
16263 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16265 case OPC_CMPGU_EQ_OB
:
16267 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16269 case OPC_CMPGU_LT_OB
:
16271 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16273 case OPC_CMPGU_LE_OB
:
16275 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16277 case OPC_CMPU_EQ_OB
:
16279 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16281 case OPC_CMPU_LT_OB
:
16283 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16285 case OPC_CMPU_LE_OB
:
16287 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16289 case OPC_PACKRL_PW
:
16291 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16295 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16299 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16303 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16311 tcg_temp_free(v1_t
);
16312 tcg_temp_free(v2_t
);
static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
                               uint32_t op1, int rt, int rs, int sa)
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);

    case OPC_APPEND_DSP:
        switch (MASK_APPEND(ctx->opcode)) {
            tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 32 - sa);
                tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
                tcg_gen_ext32u_tl(cpu_gpr[rt], cpu_gpr[rt]);
                tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
                tcg_gen_shli_tl(t0, t0, 32 - sa);
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
                tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            if (sa != 0 && sa != 2) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_ext32u_tl(t0, t0);
                tcg_gen_shri_tl(t0, t0, 8 * (4 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
                tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
        default: /* Invalid */
            MIPS_INVAL("MASK APPEND");
            generate_exception_end(ctx, EXCP_RI);
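/*
 * Illustrative sketch: what the tcg_gen_deposit_tl() call for APPEND above
 * computes on plain 32-bit values -- rt shifted left by sa with the low sa
 * bits of rs appended underneath (sa in 0..31), kept as a signed 32-bit
 * result.  Hypothetical helper name.
 */
static int32_t ex_append(uint32_t rt_val, uint32_t rs_val, unsigned sa)
{
    uint32_t low = (sa == 0) ? 0 : (rs_val & ((1u << sa) - 1u));
    return (int32_t)((rt_val << sa) | low);
}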
16364 #ifdef TARGET_MIPS64
16365 case OPC_DAPPEND_DSP
:
16366 switch (MASK_DAPPEND(ctx
->opcode
)) {
16369 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16373 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16374 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16375 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16379 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16380 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16381 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16386 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16387 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16388 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16389 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16392 default: /* Invalid */
16393 MIPS_INVAL("MASK DAPPEND");
16394 generate_exception_end(ctx
, EXCP_RI
);
16403 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16404 int ret
, int v1
, int v2
, int check_ret
)
16413 if ((ret
== 0) && (check_ret
== 1)) {
16414 /* Treat as NOP. */
16418 t0
= tcg_temp_new();
16419 t1
= tcg_temp_new();
16420 v1_t
= tcg_temp_new();
16421 v2_t
= tcg_temp_new();
16423 gen_load_gpr(v1_t
, v1
);
16424 gen_load_gpr(v2_t
, v2
);
16427 case OPC_EXTR_W_DSP
:
16431 tcg_gen_movi_tl(t0
, v2
);
16432 tcg_gen_movi_tl(t1
, v1
);
16433 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16436 tcg_gen_movi_tl(t0
, v2
);
16437 tcg_gen_movi_tl(t1
, v1
);
16438 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16440 case OPC_EXTR_RS_W
:
16441 tcg_gen_movi_tl(t0
, v2
);
16442 tcg_gen_movi_tl(t1
, v1
);
16443 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16446 tcg_gen_movi_tl(t0
, v2
);
16447 tcg_gen_movi_tl(t1
, v1
);
16448 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16450 case OPC_EXTRV_S_H
:
16451 tcg_gen_movi_tl(t0
, v2
);
16452 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16455 tcg_gen_movi_tl(t0
, v2
);
16456 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16458 case OPC_EXTRV_R_W
:
16459 tcg_gen_movi_tl(t0
, v2
);
16460 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16462 case OPC_EXTRV_RS_W
:
16463 tcg_gen_movi_tl(t0
, v2
);
16464 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16467 tcg_gen_movi_tl(t0
, v2
);
16468 tcg_gen_movi_tl(t1
, v1
);
16469 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16472 tcg_gen_movi_tl(t0
, v2
);
16473 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16476 tcg_gen_movi_tl(t0
, v2
);
16477 tcg_gen_movi_tl(t1
, v1
);
16478 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16481 tcg_gen_movi_tl(t0
, v2
);
16482 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16485 imm
= (ctx
->opcode
>> 20) & 0x3F;
16486 tcg_gen_movi_tl(t0
, ret
);
16487 tcg_gen_movi_tl(t1
, imm
);
16488 gen_helper_shilo(t0
, t1
, cpu_env
);
16491 tcg_gen_movi_tl(t0
, ret
);
16492 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16495 tcg_gen_movi_tl(t0
, ret
);
16496 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16499 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16500 tcg_gen_movi_tl(t0
, imm
);
16501 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16504 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16505 tcg_gen_movi_tl(t0
, imm
);
16506 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16510 #ifdef TARGET_MIPS64
16511 case OPC_DEXTR_W_DSP
:
16515 tcg_gen_movi_tl(t0
, ret
);
16516 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16520 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16521 int ac
= (ctx
->opcode
>> 11) & 0x03;
16522 tcg_gen_movi_tl(t0
, shift
);
16523 tcg_gen_movi_tl(t1
, ac
);
16524 gen_helper_dshilo(t0
, t1
, cpu_env
);
16529 int ac
= (ctx
->opcode
>> 11) & 0x03;
16530 tcg_gen_movi_tl(t0
, ac
);
16531 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16535 tcg_gen_movi_tl(t0
, v2
);
16536 tcg_gen_movi_tl(t1
, v1
);
16538 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16541 tcg_gen_movi_tl(t0
, v2
);
16542 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16545 tcg_gen_movi_tl(t0
, v2
);
16546 tcg_gen_movi_tl(t1
, v1
);
16547 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16550 tcg_gen_movi_tl(t0
, v2
);
16551 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16554 tcg_gen_movi_tl(t0
, v2
);
16555 tcg_gen_movi_tl(t1
, v1
);
16556 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16558 case OPC_DEXTR_R_L
:
16559 tcg_gen_movi_tl(t0
, v2
);
16560 tcg_gen_movi_tl(t1
, v1
);
16561 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16563 case OPC_DEXTR_RS_L
:
16564 tcg_gen_movi_tl(t0
, v2
);
16565 tcg_gen_movi_tl(t1
, v1
);
16566 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16569 tcg_gen_movi_tl(t0
, v2
);
16570 tcg_gen_movi_tl(t1
, v1
);
16571 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16573 case OPC_DEXTR_R_W
:
16574 tcg_gen_movi_tl(t0
, v2
);
16575 tcg_gen_movi_tl(t1
, v1
);
16576 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16578 case OPC_DEXTR_RS_W
:
16579 tcg_gen_movi_tl(t0
, v2
);
16580 tcg_gen_movi_tl(t1
, v1
);
16581 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16583 case OPC_DEXTR_S_H
:
16584 tcg_gen_movi_tl(t0
, v2
);
16585 tcg_gen_movi_tl(t1
, v1
);
16586 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16588 case OPC_DEXTRV_S_H
:
16589 tcg_gen_movi_tl(t0
, v2
);
16590 tcg_gen_movi_tl(t1
, v1
);
16591 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16594 tcg_gen_movi_tl(t0
, v2
);
16595 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16597 case OPC_DEXTRV_R_L
:
16598 tcg_gen_movi_tl(t0
, v2
);
16599 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16601 case OPC_DEXTRV_RS_L
:
16602 tcg_gen_movi_tl(t0
, v2
);
16603 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16606 tcg_gen_movi_tl(t0
, v2
);
16607 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16609 case OPC_DEXTRV_R_W
:
16610 tcg_gen_movi_tl(t0
, v2
);
16611 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16613 case OPC_DEXTRV_RS_W
:
16614 tcg_gen_movi_tl(t0
, v2
);
16615 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16624 tcg_temp_free(v1_t
);
16625 tcg_temp_free(v2_t
);
16628 /* End MIPSDSP functions. */
static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_LSA:
16643 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16645 case OPC_MULT
... OPC_DIVU
:
16646 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16656 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16659 MIPS_INVAL("special_r6 muldiv");
16660 generate_exception_end(ctx
, EXCP_RI
);
16666 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16670 if (rt
== 0 && sa
== 1) {
/* The major opcode and function field are shared with the pre-R6
   MFHI/MTHI, so we additionally need to check the other fields. */
16673 gen_cl(ctx
, op1
, rd
, rs
);
16675 generate_exception_end(ctx
, EXCP_RI
);
16679 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
16680 gen_helper_do_semihosting(cpu_env
);
16682 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
16683 generate_exception_end(ctx
, EXCP_RI
);
16685 generate_exception_end(ctx
, EXCP_DBp
);
16689 #if defined(TARGET_MIPS64)
16691 check_mips_64(ctx
);
16692 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16696 if (rt
== 0 && sa
== 1) {
/* The major opcode and function field are shared with the pre-R6
   MFHI/MTHI, so we additionally need to check the other fields. */
16699 check_mips_64(ctx
);
16700 gen_cl(ctx
, op1
, rd
, rs
);
16702 generate_exception_end(ctx
, EXCP_RI
);
16705 case OPC_DMULT
... OPC_DDIVU
:
16706 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16716 check_mips_64(ctx
);
16717 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16720 MIPS_INVAL("special_r6 muldiv");
16721 generate_exception_end(ctx
, EXCP_RI
);
16726 default: /* Invalid */
16727 MIPS_INVAL("special_r6");
16728 generate_exception_end(ctx
, EXCP_RI
);
16733 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
16735 int rs
, rt
, rd
, sa
;
16738 rs
= (ctx
->opcode
>> 21) & 0x1f;
16739 rt
= (ctx
->opcode
>> 16) & 0x1f;
16740 rd
= (ctx
->opcode
>> 11) & 0x1f;
16741 sa
= (ctx
->opcode
>> 6) & 0x1f;
16743 op1
= MASK_SPECIAL(ctx
->opcode
);
16745 case OPC_MOVN
: /* Conditional move */
16747 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
16748 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
16749 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16751 case OPC_MFHI
: /* Move from HI/LO */
16753 gen_HILO(ctx
, op1
, rs
& 3, rd
);
16756 case OPC_MTLO
: /* Move to HI/LO */
16757 gen_HILO(ctx
, op1
, rd
& 3, rs
);
16760 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
16761 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16762 check_cp1_enabled(ctx
);
16763 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
16764 (ctx
->opcode
>> 16) & 1);
16766 generate_exception_err(ctx
, EXCP_CpU
, 1);
16772 check_insn(ctx
, INSN_VR54XX
);
16773 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
16774 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
16776 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
16781 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16783 #if defined(TARGET_MIPS64)
16784 case OPC_DMULT
... OPC_DDIVU
:
16785 check_insn(ctx
, ISA_MIPS3
);
16786 check_mips_64(ctx
);
16787 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16791 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16794 #ifdef MIPS_STRICT_STANDARD
16795 MIPS_INVAL("SPIM");
16796 generate_exception_end(ctx
, EXCP_RI
);
16798 /* Implemented as RI exception for now. */
16799 MIPS_INVAL("spim (unofficial)");
16800 generate_exception_end(ctx
, EXCP_RI
);
16803 default: /* Invalid */
16804 MIPS_INVAL("special_legacy");
16805 generate_exception_end(ctx
, EXCP_RI
);
16810 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
16812 int rs
, rt
, rd
, sa
;
16815 rs
= (ctx
->opcode
>> 21) & 0x1f;
16816 rt
= (ctx
->opcode
>> 16) & 0x1f;
16817 rd
= (ctx
->opcode
>> 11) & 0x1f;
16818 sa
= (ctx
->opcode
>> 6) & 0x1f;
16820 op1
= MASK_SPECIAL(ctx
->opcode
);
16822 case OPC_SLL
: /* Shift with immediate */
16823 if (sa
== 5 && rd
== 0 &&
16824 rs
== 0 && rt
== 0) { /* PAUSE */
16825 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
16826 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
16827 generate_exception_end(ctx
, EXCP_RI
);
16833 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16836 switch ((ctx
->opcode
>> 21) & 0x1f) {
16838 /* rotr is decoded as srl on non-R2 CPUs */
16839 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16844 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16847 generate_exception_end(ctx
, EXCP_RI
);
16851 case OPC_ADD
... OPC_SUBU
:
16852 gen_arith(ctx
, op1
, rd
, rs
, rt
);
16854 case OPC_SLLV
: /* Shifts */
16856 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16859 switch ((ctx
->opcode
>> 6) & 0x1f) {
16861 /* rotrv is decoded as srlv on non-R2 CPUs */
16862 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16867 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16870 generate_exception_end(ctx
, EXCP_RI
);
16874 case OPC_SLT
: /* Set on less than */
16876 gen_slt(ctx
, op1
, rd
, rs
, rt
);
16878 case OPC_AND
: /* Logic*/
16882 gen_logic(ctx
, op1
, rd
, rs
, rt
);
16885 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16887 case OPC_TGE
... OPC_TEQ
: /* Traps */
16889 check_insn(ctx
, ISA_MIPS2
);
16890 gen_trap(ctx
, op1
, rs
, rt
, -1);
16892 case OPC_LSA
: /* OPC_PMON */
16893 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
16894 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
16895 decode_opc_special_r6(env
, ctx
);
16897 /* Pmon entry point, also R4010 selsl */
16898 #ifdef MIPS_STRICT_STANDARD
16899 MIPS_INVAL("PMON / selsl");
16900 generate_exception_end(ctx
, EXCP_RI
);
16902 gen_helper_0e0i(pmon
, sa
);
16907 generate_exception_end(ctx
, EXCP_SYSCALL
);
16910 generate_exception_end(ctx
, EXCP_BREAK
);
16913 check_insn(ctx
, ISA_MIPS2
);
16914 /* Treat as NOP. */
16917 #if defined(TARGET_MIPS64)
16918 /* MIPS64 specific opcodes */
16923 check_insn(ctx
, ISA_MIPS3
);
16924 check_mips_64(ctx
);
16925 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16928 switch ((ctx
->opcode
>> 21) & 0x1f) {
16930 /* drotr is decoded as dsrl on non-R2 CPUs */
16931 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16936 check_insn(ctx
, ISA_MIPS3
);
16937 check_mips_64(ctx
);
16938 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16941 generate_exception_end(ctx
, EXCP_RI
);
16946 switch ((ctx
->opcode
>> 21) & 0x1f) {
16948 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
16949 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16954 check_insn(ctx
, ISA_MIPS3
);
16955 check_mips_64(ctx
);
16956 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16959 generate_exception_end(ctx
, EXCP_RI
);
16963 case OPC_DADD
... OPC_DSUBU
:
16964 check_insn(ctx
, ISA_MIPS3
);
16965 check_mips_64(ctx
);
16966 gen_arith(ctx
, op1
, rd
, rs
, rt
);
16970 check_insn(ctx
, ISA_MIPS3
);
16971 check_mips_64(ctx
);
16972 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16975 switch ((ctx
->opcode
>> 6) & 0x1f) {
16977 /* drotrv is decoded as dsrlv on non-R2 CPUs */
16978 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16983 check_insn(ctx
, ISA_MIPS3
);
16984 check_mips_64(ctx
);
16985 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16988 generate_exception_end(ctx
, EXCP_RI
);
16993 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
16994 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
16995 decode_opc_special_r6(env
, ctx
);
17000 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17001 decode_opc_special_r6(env
, ctx
);
17003 decode_opc_special_legacy(env
, ctx
);
17008 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17013 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17015 rs
= (ctx
->opcode
>> 21) & 0x1f;
17016 rt
= (ctx
->opcode
>> 16) & 0x1f;
17017 rd
= (ctx
->opcode
>> 11) & 0x1f;
17019 op1
= MASK_SPECIAL2(ctx
->opcode
);
17021 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17022 case OPC_MSUB
... OPC_MSUBU
:
17023 check_insn(ctx
, ISA_MIPS32
);
17024 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17027 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17030 case OPC_DIVU_G_2F
:
17031 case OPC_MULT_G_2F
:
17032 case OPC_MULTU_G_2F
:
17034 case OPC_MODU_G_2F
:
17035 check_insn(ctx
, INSN_LOONGSON2F
);
17036 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17040 check_insn(ctx
, ISA_MIPS32
);
17041 gen_cl(ctx
, op1
, rd
, rs
);
17044 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17045 gen_helper_do_semihosting(cpu_env
);
17047 /* XXX: not clear which exception should be raised
17048 * when in debug mode...
17050 check_insn(ctx
, ISA_MIPS32
);
17051 generate_exception_end(ctx
, EXCP_DBp
);
17054 #if defined(TARGET_MIPS64)
17057 check_insn(ctx
, ISA_MIPS64
);
17058 check_mips_64(ctx
);
17059 gen_cl(ctx
, op1
, rd
, rs
);
17061 case OPC_DMULT_G_2F
:
17062 case OPC_DMULTU_G_2F
:
17063 case OPC_DDIV_G_2F
:
17064 case OPC_DDIVU_G_2F
:
17065 case OPC_DMOD_G_2F
:
17066 case OPC_DMODU_G_2F
:
17067 check_insn(ctx
, INSN_LOONGSON2F
);
17068 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17071 default: /* Invalid */
17072 MIPS_INVAL("special2_legacy");
17073 generate_exception_end(ctx
, EXCP_RI
);
17078 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17080 int rs
, rt
, rd
, sa
;
17084 rs
= (ctx
->opcode
>> 21) & 0x1f;
17085 rt
= (ctx
->opcode
>> 16) & 0x1f;
17086 rd
= (ctx
->opcode
>> 11) & 0x1f;
17087 sa
= (ctx
->opcode
>> 6) & 0x1f;
17088 imm
= (int16_t)ctx
->opcode
>> 7;
17090 op1
= MASK_SPECIAL3(ctx
->opcode
);
17094 /* hint codes 24-31 are reserved and signal RI */
17095 generate_exception_end(ctx
, EXCP_RI
);
17097 /* Treat as NOP. */
17100 /* Treat as NOP. */
17103 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17106 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17111 /* Treat as NOP. */
17114 op2
= MASK_BSHFL(ctx
->opcode
);
17116 case OPC_ALIGN
... OPC_ALIGN_END
:
17117 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17120 gen_bitswap(ctx
, op2
, rd
, rt
);
17125 #if defined(TARGET_MIPS64)
17127 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17130 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17133 check_mips_64(ctx
);
17136 /* Treat as NOP. */
17139 op2
= MASK_DBSHFL(ctx
->opcode
);
17141 case OPC_DALIGN
... OPC_DALIGN_END
:
17142 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17145 gen_bitswap(ctx
, op2
, rd
, rt
);
17152 default: /* Invalid */
17153 MIPS_INVAL("special3_r6");
17154 generate_exception_end(ctx
, EXCP_RI
);
17159 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17164 rs
= (ctx
->opcode
>> 21) & 0x1f;
17165 rt
= (ctx
->opcode
>> 16) & 0x1f;
17166 rd
= (ctx
->opcode
>> 11) & 0x1f;
17168 op1
= MASK_SPECIAL3(ctx
->opcode
);
17170 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17171 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17172 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17173 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17174 * the same mask and op1. */
17175 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17176 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17179 case OPC_ADDUH_R_QB
:
17181 case OPC_ADDQH_R_PH
:
17183 case OPC_ADDQH_R_W
:
17185 case OPC_SUBUH_R_QB
:
17187 case OPC_SUBQH_R_PH
:
17189 case OPC_SUBQH_R_W
:
17190 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17195 case OPC_MULQ_RS_W
:
17196 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17199 MIPS_INVAL("MASK ADDUH.QB");
17200 generate_exception_end(ctx
, EXCP_RI
);
17203 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17204 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17206 generate_exception_end(ctx
, EXCP_RI
);
17210 op2
= MASK_LX(ctx
->opcode
);
17212 #if defined(TARGET_MIPS64)
17218 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17220 default: /* Invalid */
17221 MIPS_INVAL("MASK LX");
17222 generate_exception_end(ctx
, EXCP_RI
);
17226 case OPC_ABSQ_S_PH_DSP
:
17227 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17229 case OPC_ABSQ_S_QB
:
17230 case OPC_ABSQ_S_PH
:
17232 case OPC_PRECEQ_W_PHL
:
17233 case OPC_PRECEQ_W_PHR
:
17234 case OPC_PRECEQU_PH_QBL
:
17235 case OPC_PRECEQU_PH_QBR
:
17236 case OPC_PRECEQU_PH_QBLA
:
17237 case OPC_PRECEQU_PH_QBRA
:
17238 case OPC_PRECEU_PH_QBL
:
17239 case OPC_PRECEU_PH_QBR
:
17240 case OPC_PRECEU_PH_QBLA
:
17241 case OPC_PRECEU_PH_QBRA
:
17242 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17249 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17252 MIPS_INVAL("MASK ABSQ_S.PH");
17253 generate_exception_end(ctx
, EXCP_RI
);
17257 case OPC_ADDU_QB_DSP
:
17258 op2
= MASK_ADDU_QB(ctx
->opcode
);
17261 case OPC_ADDQ_S_PH
:
17264 case OPC_ADDU_S_QB
:
17266 case OPC_ADDU_S_PH
:
17268 case OPC_SUBQ_S_PH
:
17271 case OPC_SUBU_S_QB
:
17273 case OPC_SUBU_S_PH
:
17277 case OPC_RADDU_W_QB
:
17278 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17280 case OPC_MULEU_S_PH_QBL
:
17281 case OPC_MULEU_S_PH_QBR
:
17282 case OPC_MULQ_RS_PH
:
17283 case OPC_MULEQ_S_W_PHL
:
17284 case OPC_MULEQ_S_W_PHR
:
17285 case OPC_MULQ_S_PH
:
17286 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17288 default: /* Invalid */
17289 MIPS_INVAL("MASK ADDU.QB");
17290 generate_exception_end(ctx
, EXCP_RI
);
17295 case OPC_CMPU_EQ_QB_DSP
:
17296 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17298 case OPC_PRECR_SRA_PH_W
:
17299 case OPC_PRECR_SRA_R_PH_W
:
17300 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17302 case OPC_PRECR_QB_PH
:
17303 case OPC_PRECRQ_QB_PH
:
17304 case OPC_PRECRQ_PH_W
:
17305 case OPC_PRECRQ_RS_PH_W
:
17306 case OPC_PRECRQU_S_QB_PH
:
17307 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17309 case OPC_CMPU_EQ_QB
:
17310 case OPC_CMPU_LT_QB
:
17311 case OPC_CMPU_LE_QB
:
17312 case OPC_CMP_EQ_PH
:
17313 case OPC_CMP_LT_PH
:
17314 case OPC_CMP_LE_PH
:
17315 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17317 case OPC_CMPGU_EQ_QB
:
17318 case OPC_CMPGU_LT_QB
:
17319 case OPC_CMPGU_LE_QB
:
17320 case OPC_CMPGDU_EQ_QB
:
17321 case OPC_CMPGDU_LT_QB
:
17322 case OPC_CMPGDU_LE_QB
:
17325 case OPC_PACKRL_PH
:
17326 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17328 default: /* Invalid */
17329 MIPS_INVAL("MASK CMPU.EQ.QB");
17330 generate_exception_end(ctx
, EXCP_RI
);
17334 case OPC_SHLL_QB_DSP
:
17335 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17337 case OPC_DPA_W_PH_DSP
:
17338 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17340 case OPC_DPAU_H_QBL
:
17341 case OPC_DPAU_H_QBR
:
17342 case OPC_DPSU_H_QBL
:
17343 case OPC_DPSU_H_QBR
:
17345 case OPC_DPAX_W_PH
:
17346 case OPC_DPAQ_S_W_PH
:
17347 case OPC_DPAQX_S_W_PH
:
17348 case OPC_DPAQX_SA_W_PH
:
17350 case OPC_DPSX_W_PH
:
17351 case OPC_DPSQ_S_W_PH
:
17352 case OPC_DPSQX_S_W_PH
:
17353 case OPC_DPSQX_SA_W_PH
:
17354 case OPC_MULSAQ_S_W_PH
:
17355 case OPC_DPAQ_SA_L_W
:
17356 case OPC_DPSQ_SA_L_W
:
17357 case OPC_MAQ_S_W_PHL
:
17358 case OPC_MAQ_S_W_PHR
:
17359 case OPC_MAQ_SA_W_PHL
:
17360 case OPC_MAQ_SA_W_PHR
:
17361 case OPC_MULSA_W_PH
:
17362 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17364 default: /* Invalid */
17365 MIPS_INVAL("MASK DPAW.PH");
17366 generate_exception_end(ctx
, EXCP_RI
);
17371 op2
= MASK_INSV(ctx
->opcode
);
17382 t0
= tcg_temp_new();
17383 t1
= tcg_temp_new();
17385 gen_load_gpr(t0
, rt
);
17386 gen_load_gpr(t1
, rs
);
17388 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17394 default: /* Invalid */
17395 MIPS_INVAL("MASK INSV");
17396 generate_exception_end(ctx
, EXCP_RI
);
17400 case OPC_APPEND_DSP
:
17401 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17403 case OPC_EXTR_W_DSP
:
17404 op2
= MASK_EXTR_W(ctx
->opcode
);
17408 case OPC_EXTR_RS_W
:
17410 case OPC_EXTRV_S_H
:
17412 case OPC_EXTRV_R_W
:
17413 case OPC_EXTRV_RS_W
:
17418 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17421 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17427 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17429 default: /* Invalid */
17430 MIPS_INVAL("MASK EXTR.W");
17431 generate_exception_end(ctx
, EXCP_RI
);
17435 #if defined(TARGET_MIPS64)
17436 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17437 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17438 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17439 check_insn(ctx
, INSN_LOONGSON2E
);
17440 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17442 case OPC_ABSQ_S_QH_DSP
:
17443 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17445 case OPC_PRECEQ_L_PWL
:
17446 case OPC_PRECEQ_L_PWR
:
17447 case OPC_PRECEQ_PW_QHL
:
17448 case OPC_PRECEQ_PW_QHR
:
17449 case OPC_PRECEQ_PW_QHLA
:
17450 case OPC_PRECEQ_PW_QHRA
:
17451 case OPC_PRECEQU_QH_OBL
:
17452 case OPC_PRECEQU_QH_OBR
:
17453 case OPC_PRECEQU_QH_OBLA
:
17454 case OPC_PRECEQU_QH_OBRA
:
17455 case OPC_PRECEU_QH_OBL
:
17456 case OPC_PRECEU_QH_OBR
:
17457 case OPC_PRECEU_QH_OBLA
:
17458 case OPC_PRECEU_QH_OBRA
:
17459 case OPC_ABSQ_S_OB
:
17460 case OPC_ABSQ_S_PW
:
17461 case OPC_ABSQ_S_QH
:
17462 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17470 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17472 default: /* Invalid */
17473 MIPS_INVAL("MASK ABSQ_S.QH");
17474 generate_exception_end(ctx
, EXCP_RI
);
17478 case OPC_ADDU_OB_DSP
:
17479 op2
= MASK_ADDU_OB(ctx
->opcode
);
17481 case OPC_RADDU_L_OB
:
17483 case OPC_SUBQ_S_PW
:
17485 case OPC_SUBQ_S_QH
:
17487 case OPC_SUBU_S_OB
:
17489 case OPC_SUBU_S_QH
:
17491 case OPC_SUBUH_R_OB
:
17493 case OPC_ADDQ_S_PW
:
17495 case OPC_ADDQ_S_QH
:
17497 case OPC_ADDU_S_OB
:
17499 case OPC_ADDU_S_QH
:
17501 case OPC_ADDUH_R_OB
:
17502 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17504 case OPC_MULEQ_S_PW_QHL
:
17505 case OPC_MULEQ_S_PW_QHR
:
17506 case OPC_MULEU_S_QH_OBL
:
17507 case OPC_MULEU_S_QH_OBR
:
17508 case OPC_MULQ_RS_QH
:
17509 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17511 default: /* Invalid */
17512 MIPS_INVAL("MASK ADDU.OB");
17513 generate_exception_end(ctx
, EXCP_RI
);
17517 case OPC_CMPU_EQ_OB_DSP
:
17518 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17520 case OPC_PRECR_SRA_QH_PW
:
17521 case OPC_PRECR_SRA_R_QH_PW
:
17522 /* Return value is rt. */
17523 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17525 case OPC_PRECR_OB_QH
:
17526 case OPC_PRECRQ_OB_QH
:
17527 case OPC_PRECRQ_PW_L
:
17528 case OPC_PRECRQ_QH_PW
:
17529 case OPC_PRECRQ_RS_QH_PW
:
17530 case OPC_PRECRQU_S_OB_QH
:
17531 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17533 case OPC_CMPU_EQ_OB
:
17534 case OPC_CMPU_LT_OB
:
17535 case OPC_CMPU_LE_OB
:
17536 case OPC_CMP_EQ_QH
:
17537 case OPC_CMP_LT_QH
:
17538 case OPC_CMP_LE_QH
:
17539 case OPC_CMP_EQ_PW
:
17540 case OPC_CMP_LT_PW
:
17541 case OPC_CMP_LE_PW
:
17542 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17544 case OPC_CMPGDU_EQ_OB
:
17545 case OPC_CMPGDU_LT_OB
:
17546 case OPC_CMPGDU_LE_OB
:
17547 case OPC_CMPGU_EQ_OB
:
17548 case OPC_CMPGU_LT_OB
:
17549 case OPC_CMPGU_LE_OB
:
17550 case OPC_PACKRL_PW
:
17554 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17556 default: /* Invalid */
17557 MIPS_INVAL("MASK CMPU_EQ.OB");
17558 generate_exception_end(ctx
, EXCP_RI
);
17562 case OPC_DAPPEND_DSP
:
17563 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17565 case OPC_DEXTR_W_DSP
:
17566 op2
= MASK_DEXTR_W(ctx
->opcode
);
17573 case OPC_DEXTR_R_L
:
17574 case OPC_DEXTR_RS_L
:
17576 case OPC_DEXTR_R_W
:
17577 case OPC_DEXTR_RS_W
:
17578 case OPC_DEXTR_S_H
:
17580 case OPC_DEXTRV_R_L
:
17581 case OPC_DEXTRV_RS_L
:
17582 case OPC_DEXTRV_S_H
:
17584 case OPC_DEXTRV_R_W
:
17585 case OPC_DEXTRV_RS_W
:
17586 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17591 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17593 default: /* Invalid */
17594 MIPS_INVAL("MASK EXTR.W");
17595 generate_exception_end(ctx
, EXCP_RI
);
17599 case OPC_DPAQ_W_QH_DSP
:
17600 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17602 case OPC_DPAU_H_OBL
:
17603 case OPC_DPAU_H_OBR
:
17604 case OPC_DPSU_H_OBL
:
17605 case OPC_DPSU_H_OBR
:
17607 case OPC_DPAQ_S_W_QH
:
17609 case OPC_DPSQ_S_W_QH
:
17610 case OPC_MULSAQ_S_W_QH
:
17611 case OPC_DPAQ_SA_L_PW
:
17612 case OPC_DPSQ_SA_L_PW
:
17613 case OPC_MULSAQ_S_L_PW
:
17614 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17616 case OPC_MAQ_S_W_QHLL
:
17617 case OPC_MAQ_S_W_QHLR
:
17618 case OPC_MAQ_S_W_QHRL
:
17619 case OPC_MAQ_S_W_QHRR
:
17620 case OPC_MAQ_SA_W_QHLL
:
17621 case OPC_MAQ_SA_W_QHLR
:
17622 case OPC_MAQ_SA_W_QHRL
:
17623 case OPC_MAQ_SA_W_QHRR
:
17624 case OPC_MAQ_S_L_PWL
:
17625 case OPC_MAQ_S_L_PWR
:
17630 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17632 default: /* Invalid */
17633 MIPS_INVAL("MASK DPAQ.W.QH");
17634 generate_exception_end(ctx
, EXCP_RI
);
17638 case OPC_DINSV_DSP
:
17639 op2
= MASK_INSV(ctx
->opcode
);
17650 t0
= tcg_temp_new();
17651 t1
= tcg_temp_new();
17653 gen_load_gpr(t0
, rt
);
17654 gen_load_gpr(t1
, rs
);
17656 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17662 default: /* Invalid */
17663 MIPS_INVAL("MASK DINSV");
17664 generate_exception_end(ctx
, EXCP_RI
);
17668 case OPC_SHLL_OB_DSP
:
17669 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17672 default: /* Invalid */
17673 MIPS_INVAL("special3_legacy");
17674 generate_exception_end(ctx
, EXCP_RI
);
17679 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
17681 int rs
, rt
, rd
, sa
;
17684 rs
= (ctx
->opcode
>> 21) & 0x1f;
17685 rt
= (ctx
->opcode
>> 16) & 0x1f;
17686 rd
= (ctx
->opcode
>> 11) & 0x1f;
17687 sa
= (ctx
->opcode
>> 6) & 0x1f;
17689 op1
= MASK_SPECIAL3(ctx
->opcode
);
17693 check_insn(ctx
, ISA_MIPS32R2
);
17694 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17697 op2
= MASK_BSHFL(ctx
->opcode
);
17699 case OPC_ALIGN
... OPC_ALIGN_END
:
17701 check_insn(ctx
, ISA_MIPS32R6
);
17702 decode_opc_special3_r6(env
, ctx
);
17705 check_insn(ctx
, ISA_MIPS32R2
);
17706 gen_bshfl(ctx
, op2
, rt
, rd
);
17710 #if defined(TARGET_MIPS64)
17711 case OPC_DEXTM
... OPC_DEXT
:
17712 case OPC_DINSM
... OPC_DINS
:
17713 check_insn(ctx
, ISA_MIPS64R2
);
17714 check_mips_64(ctx
);
17715 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17718 op2
= MASK_DBSHFL(ctx
->opcode
);
17720 case OPC_DALIGN
... OPC_DALIGN_END
:
17722 check_insn(ctx
, ISA_MIPS32R6
);
17723 decode_opc_special3_r6(env
, ctx
);
17726 check_insn(ctx
, ISA_MIPS64R2
);
17727 check_mips_64(ctx
);
17728 op2
= MASK_DBSHFL(ctx
->opcode
);
17729 gen_bshfl(ctx
, op2
, rt
, rd
);
17735 gen_rdhwr(ctx
, rt
, rd
);
17738 check_insn(ctx
, ASE_MT
);
17740 TCGv t0
= tcg_temp_new();
17741 TCGv t1
= tcg_temp_new();
17743 gen_load_gpr(t0
, rt
);
17744 gen_load_gpr(t1
, rs
);
17745 gen_helper_fork(t0
, t1
);
17751 check_insn(ctx
, ASE_MT
);
17753 TCGv t0
= tcg_temp_new();
17755 gen_load_gpr(t0
, rs
);
17756 gen_helper_yield(t0
, cpu_env
, t0
);
17757 gen_store_gpr(t0
, rd
);
17762 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17763 decode_opc_special3_r6(env
, ctx
);
17765 decode_opc_special3_legacy(env
, ctx
);
/* MIPS SIMD Architecture (MSA) */
static inline int check_msa_access(DisasContext *ctx)
{
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
        return 0;
    }

    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            return 0;
        } else {
            generate_exception_end(ctx, EXCP_RI);
            return 0;
        }
    }
    return 1;
}
static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
{
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    switch (df) {
    case DF_BYTE:
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        break;
    case DF_HALF:
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        break;
    case DF_WORD:
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        break;
    case DF_DOUBLE:
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
        break;
    }
    tcg_gen_subi_i64(t0, msa_wr_d[wt << 1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt << 1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt << 1) + 1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt << 1) + 1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
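/*
 * Illustrative sketch, not used by the translator: the same "is any
 * element zero" bit trick as above, written as plain C on one 64-bit
 * lane with the DF_BYTE constants.  The function name is ours.
 */
static inline int msa_lane_has_zero_byte_example(uint64_t lane)
{
    /* (x - 0x01..01) & ~x & 0x80..80 is non-zero iff some byte of x is 0 */
    return ((lane - 0x0101010101010101ULL) & ~lane &
            0x8080808080808080ULL) != 0;
}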
17832 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
17834 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
17835 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
17836 int64_t s16
= (int16_t)ctx
->opcode
;
17838 check_msa_access(ctx
);
17840 if (ctx
->insn_flags
& ISA_MIPS32R6
&& ctx
->hflags
& MIPS_HFLAG_BMASK
) {
17841 generate_exception_end(ctx
, EXCP_RI
);
17848 TCGv_i64 t0
= tcg_temp_new_i64();
17849 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
17850 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
17851 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
17852 tcg_gen_trunc_i64_tl(bcond
, t0
);
17853 tcg_temp_free_i64(t0
);
17860 gen_check_zero_element(bcond
, df
, wt
);
17866 gen_check_zero_element(bcond
, df
, wt
);
17867 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
17871 ctx
->btarget
= ctx
->pc
+ (s16
<< 2) + 4;
17873 ctx
->hflags
|= MIPS_HFLAG_BC
;
17874 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
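/*
 * Illustrative sketch, not QEMU code: the branch target computed above for
 * the MSA branches is
 *
 *     btarget = pc + (sign_extend(s16) << 2) + 4;
 *
 * i.e. the 16-bit offset is sign-extended, scaled to instruction words,
 * and taken relative to the address of the delay slot.
 */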
17877 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
17879 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
17880 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
17881 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
17882 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
17884 TCGv_i32 twd
= tcg_const_i32(wd
);
17885 TCGv_i32 tws
= tcg_const_i32(ws
);
17886 TCGv_i32 ti8
= tcg_const_i32(i8
);
17888 switch (MASK_MSA_I8(ctx
->opcode
)) {
17890 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
17893 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
17896 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
17899 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
17902 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
17905 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
17908 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
17914 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
17915 if (df
== DF_DOUBLE
) {
17916 generate_exception_end(ctx
, EXCP_RI
);
17918 TCGv_i32 tdf
= tcg_const_i32(df
);
17919 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
17920 tcg_temp_free_i32(tdf
);
17925 MIPS_INVAL("MSA instruction");
17926 generate_exception_end(ctx
, EXCP_RI
);
17930 tcg_temp_free_i32(twd
);
17931 tcg_temp_free_i32(tws
);
17932 tcg_temp_free_i32(ti8
);
17935 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
17937 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
17938 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
17939 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
17940 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
17941 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
17942 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
17944 TCGv_i32 tdf
= tcg_const_i32(df
);
17945 TCGv_i32 twd
= tcg_const_i32(wd
);
17946 TCGv_i32 tws
= tcg_const_i32(ws
);
17947 TCGv_i32 timm
= tcg_temp_new_i32();
17948 tcg_gen_movi_i32(timm
, u5
);
17950 switch (MASK_MSA_I5(ctx
->opcode
)) {
17952 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17955 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17957 case OPC_MAXI_S_df
:
17958 tcg_gen_movi_i32(timm
, s5
);
17959 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
17961 case OPC_MAXI_U_df
:
17962 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
17964 case OPC_MINI_S_df
:
17965 tcg_gen_movi_i32(timm
, s5
);
17966 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
17968 case OPC_MINI_U_df
:
17969 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
17972 tcg_gen_movi_i32(timm
, s5
);
17973 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17975 case OPC_CLTI_S_df
:
17976 tcg_gen_movi_i32(timm
, s5
);
17977 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
17979 case OPC_CLTI_U_df
:
17980 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
17982 case OPC_CLEI_S_df
:
17983 tcg_gen_movi_i32(timm
, s5
);
17984 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
17986 case OPC_CLEI_U_df
:
17987 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
17991 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
17992 tcg_gen_movi_i32(timm
, s10
);
17993 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
17997 MIPS_INVAL("MSA instruction");
17998 generate_exception_end(ctx
, EXCP_RI
);
18002 tcg_temp_free_i32(tdf
);
18003 tcg_temp_free_i32(twd
);
18004 tcg_temp_free_i32(tws
);
18005 tcg_temp_free_i32(timm
);
static void gen_msa_bit(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t dfm = (ctx->opcode >> 16) & 0x7f;
    uint32_t df = 0, m = 0;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tdf;
    TCGv_i32 tm;
    TCGv_i32 twd;
    TCGv_i32 tws;

    if ((dfm & 0x40) == 0x00) {
        m = dfm & 0x3f;
        df = DF_DOUBLE;
    } else if ((dfm & 0x60) == 0x40) {
        m = dfm & 0x1f;
        df = DF_WORD;
    } else if ((dfm & 0x70) == 0x60) {
        m = dfm & 0x0f;
        df = DF_HALF;
    } else if ((dfm & 0x78) == 0x70) {
        m = dfm & 0x7;
        df = DF_BYTE;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    tdf = tcg_const_i32(df);
    tm  = tcg_const_i32(m);
    twd = tcg_const_i32(wd);
    tws = tcg_const_i32(ws);

18043 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18045 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18048 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18051 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18054 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18057 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18060 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18062 case OPC_BINSLI_df
:
18063 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18065 case OPC_BINSRI_df
:
18066 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18069 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18072 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18075 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18078 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18081 MIPS_INVAL("MSA instruction");
18082 generate_exception_end(ctx
, EXCP_RI
);
18086 tcg_temp_free_i32(tdf
);
18087 tcg_temp_free_i32(tm
);
18088 tcg_temp_free_i32(twd
);
18089 tcg_temp_free_i32(tws
);
18092 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18094 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18095 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18096 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18097 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18098 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18100 TCGv_i32 tdf
= tcg_const_i32(df
);
18101 TCGv_i32 twd
= tcg_const_i32(wd
);
18102 TCGv_i32 tws
= tcg_const_i32(ws
);
18103 TCGv_i32 twt
= tcg_const_i32(wt
);
18105 switch (MASK_MSA_3R(ctx
->opcode
)) {
18107 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18110 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18113 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18116 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18118 case OPC_SUBS_S_df
:
18119 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18122 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18125 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18128 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18131 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18134 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18136 case OPC_ADDS_A_df
:
18137 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18139 case OPC_SUBS_U_df
:
18140 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18143 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18146 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18149 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18152 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18155 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18158 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18160 case OPC_ADDS_S_df
:
18161 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18163 case OPC_SUBSUS_U_df
:
18164 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18167 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18170 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18173 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18176 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18179 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18182 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18184 case OPC_ADDS_U_df
:
18185 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18187 case OPC_SUBSUU_S_df
:
18188 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18191 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18194 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18197 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18200 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18203 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18205 case OPC_ASUB_S_df
:
18206 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18209 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18212 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18215 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18218 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18221 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18224 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18226 case OPC_ASUB_U_df
:
18227 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18230 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18233 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18236 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18239 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18241 case OPC_AVER_S_df
:
18242 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18245 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18248 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18251 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18254 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18256 case OPC_AVER_U_df
:
18257 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18260 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18263 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18266 case OPC_DOTP_S_df
:
18267 case OPC_DOTP_U_df
:
18268 case OPC_DPADD_S_df
:
18269 case OPC_DPADD_U_df
:
18270 case OPC_DPSUB_S_df
:
18271 case OPC_HADD_S_df
:
18272 case OPC_DPSUB_U_df
:
18273 case OPC_HADD_U_df
:
18274 case OPC_HSUB_S_df
:
18275 case OPC_HSUB_U_df
:
18276 if (df
== DF_BYTE
) {
18277 generate_exception_end(ctx
, EXCP_RI
);
18280 switch (MASK_MSA_3R(ctx
->opcode
)) {
18281 case OPC_DOTP_S_df
:
18282 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18284 case OPC_DOTP_U_df
:
18285 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18287 case OPC_DPADD_S_df
:
18288 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18290 case OPC_DPADD_U_df
:
18291 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18293 case OPC_DPSUB_S_df
:
18294 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18296 case OPC_HADD_S_df
:
18297 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18299 case OPC_DPSUB_U_df
:
18300 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18302 case OPC_HADD_U_df
:
18303 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18305 case OPC_HSUB_S_df
:
18306 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18308 case OPC_HSUB_U_df
:
18309 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18314 MIPS_INVAL("MSA instruction");
18315 generate_exception_end(ctx
, EXCP_RI
);
18318 tcg_temp_free_i32(twd
);
18319 tcg_temp_free_i32(tws
);
18320 tcg_temp_free_i32(twt
);
18321 tcg_temp_free_i32(tdf
);
static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
    uint8_t source = (ctx->opcode >> 11) & 0x1f;
    uint8_t dest = (ctx->opcode >> 6) & 0x1f;
    TCGv telm = tcg_temp_new();
    TCGv_i32 tsr = tcg_const_i32(source);
    TCGv_i32 tdt = tcg_const_i32(dest);

    switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
    case OPC_CTCMSA:
        gen_load_gpr(telm, source);
        gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
        break;
    case OPC_CFCMSA:
        gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
        gen_store_gpr(telm, dest);
        break;
    case OPC_MOVE_V:
        gen_helper_msa_move_v(cpu_env, tdt, tsr);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free(telm);
    tcg_temp_free_i32(tdt);
    tcg_temp_free_i32(tsr);
}
18356 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18359 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18360 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18361 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18363 TCGv_i32 tws
= tcg_const_i32(ws
);
18364 TCGv_i32 twd
= tcg_const_i32(wd
);
18365 TCGv_i32 tn
= tcg_const_i32(n
);
18366 TCGv_i32 tdf
= tcg_const_i32(df
);
18368 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18370 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18372 case OPC_SPLATI_df
:
18373 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18376 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18378 case OPC_COPY_S_df
:
18379 case OPC_COPY_U_df
:
18380 case OPC_INSERT_df
:
18381 #if !defined(TARGET_MIPS64)
18382 /* Double format valid only for MIPS64 */
18383 if (df
== DF_DOUBLE
) {
18384 generate_exception_end(ctx
, EXCP_RI
);
18388 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18389 case OPC_COPY_S_df
:
18390 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18392 case OPC_COPY_U_df
:
18393 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18395 case OPC_INSERT_df
:
18396 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18401 MIPS_INVAL("MSA instruction");
18402 generate_exception_end(ctx
, EXCP_RI
);
18404 tcg_temp_free_i32(twd
);
18405 tcg_temp_free_i32(tws
);
18406 tcg_temp_free_i32(tn
);
18407 tcg_temp_free_i32(tdf
);
static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
{
    uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
    uint32_t df = 0, n = 0;

    if ((dfn & 0x30) == 0x00) {
        n = dfn & 0x0f;
        df = DF_BYTE;
    } else if ((dfn & 0x38) == 0x20) {
        n = dfn & 0x07;
        df = DF_HALF;
    } else if ((dfn & 0x3c) == 0x30) {
        n = dfn & 0x03;
        df = DF_WORD;
    } else if ((dfn & 0x3e) == 0x38) {
        n = dfn & 0x01;
        df = DF_DOUBLE;
    } else if (dfn == 0x3E) {
        /* CTCMSA, CFCMSA, MOVE.V */
        gen_msa_elm_3e(env, ctx);
        return;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    gen_msa_elm_df(env, ctx, df, n);
}
18439 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18441 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18442 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18443 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18444 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18445 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18447 TCGv_i32 twd
= tcg_const_i32(wd
);
18448 TCGv_i32 tws
= tcg_const_i32(ws
);
18449 TCGv_i32 twt
= tcg_const_i32(wt
);
18450 TCGv_i32 tdf
= tcg_temp_new_i32();
18452 /* adjust df value for floating-point instruction */
18453 tcg_gen_movi_i32(tdf
, df
+ 2);
18455 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18457 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18460 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18463 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18466 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18469 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18472 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18475 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18478 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18481 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18484 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18487 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18490 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18493 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18496 tcg_gen_movi_i32(tdf
, df
+ 1);
18497 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18500 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18503 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18505 case OPC_MADD_Q_df
:
18506 tcg_gen_movi_i32(tdf
, df
+ 1);
18507 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18510 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18512 case OPC_MSUB_Q_df
:
18513 tcg_gen_movi_i32(tdf
, df
+ 1);
18514 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18517 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18520 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18523 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18526 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18529 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18532 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18535 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18538 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18541 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18544 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18547 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18550 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18553 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18555 case OPC_MULR_Q_df
:
18556 tcg_gen_movi_i32(tdf
, df
+ 1);
18557 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18560 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18562 case OPC_FMIN_A_df
:
18563 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18565 case OPC_MADDR_Q_df
:
18566 tcg_gen_movi_i32(tdf
, df
+ 1);
18567 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18570 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18573 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18575 case OPC_MSUBR_Q_df
:
18576 tcg_gen_movi_i32(tdf
, df
+ 1);
18577 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18580 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18582 case OPC_FMAX_A_df
:
18583 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18586 MIPS_INVAL("MSA instruction");
18587 generate_exception_end(ctx
, EXCP_RI
);
18591 tcg_temp_free_i32(twd
);
18592 tcg_temp_free_i32(tws
);
18593 tcg_temp_free_i32(twt
);
18594 tcg_temp_free_i32(tdf
);
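/*
 * Illustrative note, not QEMU code: the 1-bit df field of the 3RF format
 * selects the data format indirectly, which is why tdf is adjusted above:
 *
 *     floating-point ops:  df + 2  ->  0: word (.W),  1: double (.D)
 *     fixed-point Q ops:   df + 1  ->  0: half (.H),  1: word (.W)
 */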
18597 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18599 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18600 (op & (0x7 << 18)))
18601 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18602 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18603 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18604 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18605 TCGv_i32 twd
= tcg_const_i32(wd
);
18606 TCGv_i32 tws
= tcg_const_i32(ws
);
18607 TCGv_i32 twt
= tcg_const_i32(wt
);
18608 TCGv_i32 tdf
= tcg_const_i32(df
);
18610 switch (MASK_MSA_2R(ctx
->opcode
)) {
18612 #if !defined(TARGET_MIPS64)
18613 /* Double format valid only for MIPS64 */
18614 if (df
== DF_DOUBLE
) {
18615 generate_exception_end(ctx
, EXCP_RI
);
18619 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18622 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18625 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18628 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18631 MIPS_INVAL("MSA instruction");
18632 generate_exception_end(ctx
, EXCP_RI
);
18636 tcg_temp_free_i32(twd
);
18637 tcg_temp_free_i32(tws
);
18638 tcg_temp_free_i32(twt
);
18639 tcg_temp_free_i32(tdf
);
18642 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18644 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18645 (op & (0xf << 17)))
18646 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18647 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18648 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18649 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18650 TCGv_i32 twd
= tcg_const_i32(wd
);
18651 TCGv_i32 tws
= tcg_const_i32(ws
);
18652 TCGv_i32 twt
= tcg_const_i32(wt
);
18653 /* adjust df value for floating-point instruction */
18654 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18656 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18657 case OPC_FCLASS_df
:
18658 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18660 case OPC_FTRUNC_S_df
:
18661 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18663 case OPC_FTRUNC_U_df
:
18664 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18667 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18669 case OPC_FRSQRT_df
:
18670 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18673 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
18676 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
18679 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
18681 case OPC_FEXUPL_df
:
18682 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
18684 case OPC_FEXUPR_df
:
18685 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
18688 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
18691 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
18693 case OPC_FTINT_S_df
:
18694 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
18696 case OPC_FTINT_U_df
:
18697 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
18699 case OPC_FFINT_S_df
:
18700 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
18702 case OPC_FFINT_U_df
:
18703 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
18707 tcg_temp_free_i32(twd
);
18708 tcg_temp_free_i32(tws
);
18709 tcg_temp_free_i32(twt
);
18710 tcg_temp_free_i32(tdf
);
static void gen_msa_vec_v(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);

    switch (MASK_MSA_VEC(ctx->opcode)) {
    case OPC_AND_V:
        gen_helper_msa_and_v(cpu_env, twd, tws, twt);
        break;
    case OPC_OR_V:
        gen_helper_msa_or_v(cpu_env, twd, tws, twt);
        break;
    case OPC_NOR_V:
        gen_helper_msa_nor_v(cpu_env, twd, tws, twt);
        break;
    case OPC_XOR_V:
        gen_helper_msa_xor_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BMNZ_V:
        gen_helper_msa_bmnz_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BMZ_V:
        gen_helper_msa_bmz_v(cpu_env, twd, tws, twt);
        break;
    case OPC_BSEL_V:
        gen_helper_msa_bsel_v(cpu_env, twd, tws, twt);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
}
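/*
 * Illustrative sketch, not QEMU API: the bitwise selects handled above,
 * expressed on a single 64-bit lane.  Names are ours.
 */
static inline uint64_t msa_bsel_v_example(uint64_t wd, uint64_t ws, uint64_t wt)
{
    /* BSEL.V: each wd bit picks ws (0) or wt (1) */
    return (ws & ~wd) | (wt & wd);
}

static inline uint64_t msa_bmnz_v_example(uint64_t wd, uint64_t ws, uint64_t wt)
{
    /* BMNZ.V: copy ws bits into wd where the corresponding wt bit is 1 */
    return (ws & wt) | (wd & ~wt);
}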
18756 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
18758 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18766 gen_msa_vec_v(env
, ctx
);
18769 gen_msa_2r(env
, ctx
);
18772 gen_msa_2rf(env
, ctx
);
18775 MIPS_INVAL("MSA instruction");
18776 generate_exception_end(ctx
, EXCP_RI
);
18781 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
18783 uint32_t opcode
= ctx
->opcode
;
18784 check_insn(ctx
, ASE_MSA
);
18785 check_msa_access(ctx
);
18787 switch (MASK_MSA_MINOR(opcode
)) {
18788 case OPC_MSA_I8_00
:
18789 case OPC_MSA_I8_01
:
18790 case OPC_MSA_I8_02
:
18791 gen_msa_i8(env
, ctx
);
18793 case OPC_MSA_I5_06
:
18794 case OPC_MSA_I5_07
:
18795 gen_msa_i5(env
, ctx
);
18797 case OPC_MSA_BIT_09
:
18798 case OPC_MSA_BIT_0A
:
18799 gen_msa_bit(env
, ctx
);
18801 case OPC_MSA_3R_0D
:
18802 case OPC_MSA_3R_0E
:
18803 case OPC_MSA_3R_0F
:
18804 case OPC_MSA_3R_10
:
18805 case OPC_MSA_3R_11
:
18806 case OPC_MSA_3R_12
:
18807 case OPC_MSA_3R_13
:
18808 case OPC_MSA_3R_14
:
18809 case OPC_MSA_3R_15
:
18810 gen_msa_3r(env
, ctx
);
18813 gen_msa_elm(env
, ctx
);
18815 case OPC_MSA_3RF_1A
:
18816 case OPC_MSA_3RF_1B
:
18817 case OPC_MSA_3RF_1C
:
18818 gen_msa_3rf(env
, ctx
);
18821 gen_msa_vec(env
, ctx
);
18832 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
18833 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
18834 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18835 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
18837 TCGv_i32 twd
= tcg_const_i32(wd
);
18838 TCGv taddr
= tcg_temp_new();
18839 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
18841 switch (MASK_MSA_MINOR(opcode
)) {
18843 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
18846 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
18849 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
18852 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
18855 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
18858 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
18861 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
18864 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
18868 tcg_temp_free_i32(twd
);
18869 tcg_temp_free(taddr
);
18873 MIPS_INVAL("MSA instruction");
18874 generate_exception_end(ctx
, EXCP_RI
);
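/*
 * Illustrative note, not QEMU code: for the MSA LD.df/ST.df cases above
 * the 10-bit signed offset is scaled by the element size before being
 * added to the base register:
 *
 *     addr = gpr[rs] + (sign_extend(s10) << df);   // df: 0=B, 1=H, 2=W, 3=D
 */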
18880 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
18883 int rs
, rt
, rd
, sa
;
18887 /* make sure instructions are on a word boundary */
18888 if (ctx
->pc
& 0x3) {
18889 env
->CP0_BadVAddr
= ctx
->pc
;
18890 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
18894 /* Handle blikely not taken case */
18895 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
18896 TCGLabel
*l1
= gen_new_label();
18898 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
18899 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
18900 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
18904 op
= MASK_OP_MAJOR(ctx
->opcode
);
18905 rs
= (ctx
->opcode
>> 21) & 0x1f;
18906 rt
= (ctx
->opcode
>> 16) & 0x1f;
18907 rd
= (ctx
->opcode
>> 11) & 0x1f;
18908 sa
= (ctx
->opcode
>> 6) & 0x1f;
18909 imm
= (int16_t)ctx
->opcode
;
18912 decode_opc_special(env
, ctx
);
18915 decode_opc_special2_legacy(env
, ctx
);
18918 decode_opc_special3(env
, ctx
);
18921 op1
= MASK_REGIMM(ctx
->opcode
);
18923 case OPC_BLTZL
: /* REGIMM branches */
18927 check_insn(ctx
, ISA_MIPS2
);
18928 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18932 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
18936 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18938 /* OPC_NAL, OPC_BAL */
18939 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
18941 generate_exception_end(ctx
, EXCP_RI
);
18944 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
18947 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
18949 check_insn(ctx
, ISA_MIPS2
);
18950 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18951 gen_trap(ctx
, op1
, rs
, -1, imm
);
18954 check_insn(ctx
, ISA_MIPS32R2
);
18955 /* Break the TB to be able to sync copied instructions
18957 ctx
->bstate
= BS_STOP
;
18959 case OPC_BPOSGE32
: /* MIPS DSP branch */
18960 #if defined(TARGET_MIPS64)
18964 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
18966 #if defined(TARGET_MIPS64)
18968 check_insn(ctx
, ISA_MIPS32R6
);
18969 check_mips_64(ctx
);
18971 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
18975 check_insn(ctx
, ISA_MIPS32R6
);
18976 check_mips_64(ctx
);
18978 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
18982 default: /* Invalid */
18983 MIPS_INVAL("regimm");
18984 generate_exception_end(ctx
, EXCP_RI
);
18989 check_cp0_enabled(ctx
);
18990 op1
= MASK_CP0(ctx
->opcode
);
18998 #if defined(TARGET_MIPS64)
19002 #ifndef CONFIG_USER_ONLY
19003 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19004 #endif /* !CONFIG_USER_ONLY */
19006 case OPC_C0_FIRST
... OPC_C0_LAST
:
19007 #ifndef CONFIG_USER_ONLY
19008 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19009 #endif /* !CONFIG_USER_ONLY */
19012 #ifndef CONFIG_USER_ONLY
19015 TCGv t0
= tcg_temp_new();
19017 op2
= MASK_MFMC0(ctx
->opcode
);
19020 check_insn(ctx
, ASE_MT
);
19021 gen_helper_dmt(t0
);
19022 gen_store_gpr(t0
, rt
);
19025 check_insn(ctx
, ASE_MT
);
19026 gen_helper_emt(t0
);
19027 gen_store_gpr(t0
, rt
);
19030 check_insn(ctx
, ASE_MT
);
19031 gen_helper_dvpe(t0
, cpu_env
);
19032 gen_store_gpr(t0
, rt
);
19035 check_insn(ctx
, ASE_MT
);
19036 gen_helper_evpe(t0
, cpu_env
);
19037 gen_store_gpr(t0
, rt
);
19040 check_insn(ctx
, ISA_MIPS32R2
);
19041 save_cpu_state(ctx
, 1);
19042 gen_helper_di(t0
, cpu_env
);
19043 gen_store_gpr(t0
, rt
);
19044 /* Stop translation as we may have switched
19045 the execution mode. */
19046 ctx
->bstate
= BS_STOP
;
19049 check_insn(ctx
, ISA_MIPS32R2
);
19050 save_cpu_state(ctx
, 1);
19051 gen_helper_ei(t0
, cpu_env
);
19052 gen_store_gpr(t0
, rt
);
19053 /* Stop translation as we may have switched
19054 the execution mode. */
19055 ctx
->bstate
= BS_STOP
;
19057 default: /* Invalid */
19058 MIPS_INVAL("mfmc0");
19059 generate_exception_end(ctx
, EXCP_RI
);
19064 #endif /* !CONFIG_USER_ONLY */
19067 check_insn(ctx
, ISA_MIPS32R2
);
19068 gen_load_srsgpr(rt
, rd
);
19071 check_insn(ctx
, ISA_MIPS32R2
);
19072 gen_store_srsgpr(rt
, rd
);
19076 generate_exception_end(ctx
, EXCP_RI
);
19080 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
19081 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19082 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
19083 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19086 /* Arithmetic with immediate opcode */
19087 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19091 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19093 case OPC_SLTI
: /* Set on less than with immediate opcode */
19095 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
19097 case OPC_ANDI
: /* Arithmetic with immediate opcode */
19098 case OPC_LUI
: /* OPC_AUI */
19101 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
19103 case OPC_J
... OPC_JAL
: /* Jump */
19104 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19105 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19108 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
19109 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19111 generate_exception_end(ctx
, EXCP_RI
);
19114 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
19115 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19118 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19121 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
19122 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19124 generate_exception_end(ctx
, EXCP_RI
);
19127 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
19128 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19131 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19134 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
19137 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19139 check_insn(ctx
, ISA_MIPS32R6
);
19140 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
19141 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19144 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
19147 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19149 check_insn(ctx
, ISA_MIPS32R6
);
19150 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
19151 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19156 check_insn(ctx
, ISA_MIPS2
);
19157 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19161 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);

        check_insn_opc_removed(ctx, ISA_MIPS32R6);

    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);

        check_insn_opc_removed(ctx, ISA_MIPS32R6);

    case OPC_SB ... OPC_SH:

        gen_st(ctx, op, rt, rs, imm);

        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);

        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        /* Treat as NOP. */

        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */
    /* Floating point (COP1). */

        gen_cop1_ldst(ctx, op, rt, rs, imm);

        op1 = MASK_CP1(ctx->opcode);

            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);

            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);

#if defined(TARGET_MIPS64)

            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);

        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {

                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),

                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);

            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),

            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);

            check_insn(ctx, ASE_MIPS3D);

            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,

            int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {

                case R6_OPC_CMP_AF_S:
                case R6_OPC_CMP_UN_S:
                case R6_OPC_CMP_EQ_S:
                case R6_OPC_CMP_UEQ_S:
                case R6_OPC_CMP_LT_S:
                case R6_OPC_CMP_ULT_S:
                case R6_OPC_CMP_LE_S:
                case R6_OPC_CMP_ULE_S:
                case R6_OPC_CMP_SAF_S:
                case R6_OPC_CMP_SUN_S:
                case R6_OPC_CMP_SEQ_S:
                case R6_OPC_CMP_SEUQ_S:
                case R6_OPC_CMP_SLT_S:
                case R6_OPC_CMP_SULT_S:
                case R6_OPC_CMP_SLE_S:
                case R6_OPC_CMP_SULE_S:
                case R6_OPC_CMP_OR_S:
                case R6_OPC_CMP_UNE_S:
                case R6_OPC_CMP_NE_S:
                case R6_OPC_CMP_SOR_S:
                case R6_OPC_CMP_SUNE_S:
                case R6_OPC_CMP_SNE_S:
                    gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                case R6_OPC_CMP_AF_D:
                case R6_OPC_CMP_UN_D:
                case R6_OPC_CMP_EQ_D:
                case R6_OPC_CMP_UEQ_D:
                case R6_OPC_CMP_LT_D:
                case R6_OPC_CMP_ULT_D:
                case R6_OPC_CMP_LE_D:
                case R6_OPC_CMP_ULE_D:
                case R6_OPC_CMP_SAF_D:
                case R6_OPC_CMP_SUN_D:
                case R6_OPC_CMP_SEQ_D:
                case R6_OPC_CMP_SEUQ_D:
                case R6_OPC_CMP_SLT_D:
                case R6_OPC_CMP_SULT_D:
                case R6_OPC_CMP_SLE_D:
                case R6_OPC_CMP_SULE_D:
                case R6_OPC_CMP_OR_D:
                case R6_OPC_CMP_UNE_D:
                case R6_OPC_CMP_NE_D:
                case R6_OPC_CMP_SOR_D:
                case R6_OPC_CMP_SUNE_D:
                case R6_OPC_CMP_SNE_D:
                    gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);

                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                               rt, rd, sa, (imm >> 8) & 0x7);

                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,

            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);

            generate_exception_end(ctx, EXCP_RI);
    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
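            /* The 26-bit offset sits in bits 25..0 of the opcode; shifting
             * the whole word left by 2 scales it to bytes, and
             * sextract32(.., 0, 28) sign-extends the 28-bit result. */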
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);

    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {

                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
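                /* BEQZC/BNEZC carry a 21-bit offset in bits 20..0, likewise
                 * scaled by 4 and sign-extended (hence the 23-bit extract). */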
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);

            /* OPC_LDC2, OPC_SDC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);

        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);

        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);

                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);

                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);

                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */

                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);

                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);

                generate_exception_end(ctx, EXCP_RI);

            generate_exception_err(ctx, EXCP_CpU, 1);
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:

        check_insn_opc_removed(ctx, ISA_MIPS32R6);

        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);

    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);

        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);

        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);

    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);

            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);

        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);

    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);

            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);

    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)

            check_mips_64(ctx);

                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
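                /* DAUI adds the 16-bit immediate, shifted into bits 31..16,
                 * to rs.  Writes to $zero (rt == 0) are simply dropped. */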
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");

            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);

    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */

        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);

    default: /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
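/* Translate one block of guest code: decode instructions starting at tb->pc
   until the block has to end (branch-state change, page boundary, full TCG
   buffer, single-stepping, or the instruction budget is exhausted). */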
void gen_intermediate_code(CPUMIPSState *env, struct TranslationBlock *tb)

    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    target_ulong pc_start;
    target_ulong next_page_start;

    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;

    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;

    ctx.bstate = BS_NONE;

    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;

    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;

    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
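    /* MIPS32R6 requires the CPU to handle misaligned ordinary loads and
       stores, so natural alignment is not enforced on R6 cores; pre-R6
       cores keep MO_ALIGN and raise an address-error exception instead. */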
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;

    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);

    while (ctx.bstate == BS_NONE) {
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);
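        /* Three words are recorded per guest insn; restore_state_to_opc()
           below consumes them in the same order: PC, branch-state hflags,
           btarget. */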
        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* Include the breakpoint location or the tb won't
             * be flushed when it must be. */

            goto done_generating;

        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {

        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);

            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);

            generate_exception_end(&ctx, EXCP_RI);

        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* force to generate branch as there is neither delay nor

            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */

            gen_branch(&ctx, insn_bytes);

        ctx.pc += insn_bytes;

        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {

        if (ctx.pc >= next_page_start) {

        if (tcg_op_buf_full()) {

        if (num_insns >= max_insns)

    if (tb->cflags & CF_LAST_IO) {

    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);

        switch (ctx.bstate) {

            gen_goto_tb(&ctx, 0, ctx.pc);

            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);

            tcg_gen_exit_tb(0);

    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,

    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \

        fpu_fprintf(f, "w:%08x d:%016" PRIx64                           \
                    " fd:%13g fs:%13g psu: %13g\n",                     \
                    (fp)->w[FP_ENDIAN_IDX], (fp)->d,                    \
                    (double)(fp)->fd,                                   \
                    (double)(fp)->fs[FP_ENDIAN_IDX],                    \
                    (double)(fp)->fs[!FP_ENDIAN_IDX]);                  \

        tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];                  \
        tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];           \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64                           \
                    " fd:%13g fs:%13g psu:%13g\n",                      \
                    tmp.w[FP_ENDIAN_IDX], tmp.d,                        \
                    (double)tmp.fs[FP_ENDIAN_IDX],                      \
                    (double)tmp.fs[!FP_ENDIAN_IDX]);                    \
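/* printfpr() prints one FPR either as a single 64-bit register (FR=1) or,
   when the FPU is in 32-bit mode, by pairing the even/odd 32-bit halves
   (which is also why the loop below steps by two in that mode). */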
    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);

void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,

    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {

        cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);

    cpu_fprintf(f, "\n");

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"

                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);

void mips_tcg_init(void)

    /* Initialize various static tables. */

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    TCGV_UNUSED(cpu_gpr[0]);
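    /* $zero is hardwired to zero, so no TCG global is allocated for
       cpu_gpr[0]; translation code has to avoid writing it (hence the
       rt != 0 checks in decode_opc above). */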
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);

            tcg_global_mem_new_i64(TCG_AREG0, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
            tcg_global_mem_new_i64(TCG_AREG0, off, msaregnames[i * 2 + 1]);

    cpu_PC = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),

        cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),

    cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),

    bcond = tcg_global_mem_new(TCG_AREG0,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(TCG_AREG0,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),

    fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),

#include "translate_init.c"
MIPSCPU *cpu_mips_init(const char *cpu_model)

    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);

    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));

    env->cpu_model = def;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);

    fpu_init(env, def);
    mvp_init(env, def);

    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);

void cpu_state_reset(CPUMIPSState *env)

    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);

    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
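        /* MIPS III and up: also accept the two region-select bits
           (addr[63:62]) so xuseg/xkphys/xkseg addresses pass the mask. */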
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode.  */
    env->CP0_Status |= (1 << CP0St_PX);

# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode.  */
    env->CP0_Status |= (1 << CP0St_UX);

    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers.  */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);

    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);

# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);

    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump.  */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));

        env->CP0_ErrorEPC = env->active_tc.PC;

    env->active_tc.PC = (int32_t)0xBFC00000;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (kvm_enabled()) {
        env->CP0_EBase |= 0x40000000;

        env->CP0_EBase |= 0x80000000;
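    /* With KVM trap-and-emulate the guest kernel runs in useg, so the
       exception base is placed at 0x40000000; under TCG it sits in kseg0. */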
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;

    for (i = 0; i < 7; i++) {
        env->CP0_WatchLo[i] = 0;
        env->CP0_WatchHi[i] = 0x80000000;

    env->CP0_WatchLo[7] = 0;
    env->CP0_WatchHi[7] = 0;

    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {

        /* Only TC0 on VPE 0 starts as active.  */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;

        env->active_tc.CP0_TCHalt = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled.  */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted.  */

            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active.  */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);

    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);

    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {

    compute_hflags(env);
    restore_rounding_mode(env);
    restore_flush_mode(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;

void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)

    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:

    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:

        env->btarget = data[2];