/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
24 #include "qemu/osdep.h"
26 #include "disas/disas.h"
27 #include "exec/exec-all.h"
29 #include "exec/cpu_ldst.h"
31 #include "exec/helper-proto.h"
32 #include "exec/helper-gen.h"
33 #include "sysemu/kvm.h"
34 #include "exec/semihost.h"
36 #include "trace-tcg.h"
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
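/*
 * Decoding sketch: the major opcode lives in bits 31..26 of the
 * instruction word, so MASK_OP_MAJOR() simply keeps those bits and
 * clears the rest.  For example, the word 0x8C430004 (lw $v1, 4($v0))
 * masks down to 0x8C000000, i.e. OPC_LW below; the register and
 * offset fields are decoded separately by the per-format handlers.
 */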
enum {
    /* indirect opcode tables */
    OPC_SPECIAL = (0x00 << 26),
    OPC_REGIMM = (0x01 << 26),
    OPC_CP0 = (0x10 << 26),
    OPC_CP1 = (0x11 << 26),
    OPC_CP2 = (0x12 << 26),
    OPC_CP3 = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI = (0x08 << 26),
    OPC_ADDIU = (0x09 << 26),
    OPC_SLTI = (0x0A << 26),
    OPC_SLTIU = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI = (0x0C << 26),
    OPC_ORI = (0x0D << 26),
    OPC_XORI = (0x0E << 26),
    OPC_LUI = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI = (0x18 << 26),
    OPC_DADDIU = (0x19 << 26),
    /* Jump and branches */
    OPC_J = (0x02 << 26),
    OPC_JAL = (0x03 << 26),
    OPC_BEQ = (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL = (0x14 << 26),
    OPC_BNE = (0x05 << 26),
    OPC_BNEL = (0x15 << 26),
    OPC_BLEZ = (0x06 << 26),
    OPC_BLEZL = (0x16 << 26),
    OPC_BGTZ = (0x07 << 26),
    OPC_BGTZL = (0x17 << 26),
    OPC_JALX = (0x1D << 26),
    OPC_DAUI = (0x1D << 26),
    /* Load and stores */
    OPC_LDL = (0x1A << 26),
    OPC_LDR = (0x1B << 26),
    OPC_LB = (0x20 << 26),
    OPC_LH = (0x21 << 26),
    OPC_LWL = (0x22 << 26),
    OPC_LW = (0x23 << 26),
    OPC_LWPC = OPC_LW | 0x5,
    OPC_LBU = (0x24 << 26),
    OPC_LHU = (0x25 << 26),
    OPC_LWR = (0x26 << 26),
    OPC_LWU = (0x27 << 26),
    OPC_SB = (0x28 << 26),
    OPC_SH = (0x29 << 26),
    OPC_SWL = (0x2A << 26),
    OPC_SW = (0x2B << 26),
    OPC_SDL = (0x2C << 26),
    OPC_SDR = (0x2D << 26),
    OPC_SWR = (0x2E << 26),
    OPC_LL = (0x30 << 26),
    OPC_LLD = (0x34 << 26),
    OPC_LD = (0x37 << 26),
    OPC_LDPC = OPC_LD | 0x5,
    OPC_SC = (0x38 << 26),
    OPC_SCD = (0x3C << 26),
    OPC_SD = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1 = (0x31 << 26),
    OPC_LWC2 = (0x32 << 26),
    OPC_LDC1 = (0x35 << 26),
    OPC_LDC2 = (0x36 << 26),
    OPC_SWC1 = (0x39 << 26),
    OPC_SWC2 = (0x3A << 26),
    OPC_SDC1 = (0x3D << 26),
    OPC_SDC2 = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC = (0x06 << 26),
    OPC_BGEZALC = (0x06 << 26),
    OPC_BGEUC = (0x06 << 26),
    OPC_BGTZALC = (0x07 << 26),
    OPC_BLTZALC = (0x07 << 26),
    OPC_BLTUC = (0x07 << 26),
    OPC_BOVC = (0x08 << 26),
    OPC_BEQZALC = (0x08 << 26),
    OPC_BEQC = (0x08 << 26),
    OPC_BLEZC = (0x16 << 26),
    OPC_BGEZC = (0x16 << 26),
    OPC_BGEC = (0x16 << 26),
    OPC_BGTZC = (0x17 << 26),
    OPC_BLTZC = (0x17 << 26),
    OPC_BLTC = (0x17 << 26),
    OPC_BNVC = (0x18 << 26),
    OPC_BNEZALC = (0x18 << 26),
    OPC_BNEC = (0x18 << 26),
    OPC_BC = (0x32 << 26),
    OPC_BEQZC = (0x36 << 26),
    OPC_JIC = (0x36 << 26),
    OPC_BALC = (0x3A << 26),
    OPC_BNEZC = (0x3E << 26),
    OPC_JIALC = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE = (0x2F << 26),
    OPC_PREF = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL = (0x3B << 26),
};
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
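/*
 * Rough decoding note: within the OPC_PCREL major opcode the minor
 * opcode sits next to the rt field.  ADDIUPC/LWPC/LWUPC are selected
 * by bits 20..19 (TOP2BITS), AUIPC/ALUIPC need the full five bits
 * 20..16 (TOP5BITS), and LDPC is picked out by bits 20..18.
 */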
enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};

/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)
enum {
    OPC_SLL = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0 */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR = OPC_SRL | (1 << 21),
    OPC_SRA = 0x03 | OPC_SPECIAL,
    OPC_SLLV = 0x04 | OPC_SPECIAL,
    OPC_SRLV = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV = OPC_SRLV | (1 << 6),
    OPC_SRAV = 0x07 | OPC_SPECIAL,
    OPC_DSLLV = 0x14 | OPC_SPECIAL,
    OPC_DSRLV = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV = OPC_DSRLV | (1 << 6),
    OPC_DSRAV = 0x17 | OPC_SPECIAL,
    OPC_DSLL = 0x38 | OPC_SPECIAL,
    OPC_DSRL = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR = OPC_DSRL | (1 << 21),
    OPC_DSRA = 0x3B | OPC_SPECIAL,
    OPC_DSLL32 = 0x3C | OPC_SPECIAL,
    OPC_DSRL32 = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32 = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32 = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT = 0x18 | OPC_SPECIAL,
    OPC_MULTU = 0x19 | OPC_SPECIAL,
    OPC_DIV = 0x1A | OPC_SPECIAL,
    OPC_DIVU = 0x1B | OPC_SPECIAL,
    OPC_DMULT = 0x1C | OPC_SPECIAL,
    OPC_DMULTU = 0x1D | OPC_SPECIAL,
    OPC_DDIV = 0x1E | OPC_SPECIAL,
    OPC_DDIVU = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD = 0x20 | OPC_SPECIAL,
    OPC_ADDU = 0x21 | OPC_SPECIAL,
    OPC_SUB = 0x22 | OPC_SPECIAL,
    OPC_SUBU = 0x23 | OPC_SPECIAL,
    OPC_AND = 0x24 | OPC_SPECIAL,
    OPC_OR = 0x25 | OPC_SPECIAL,
    OPC_XOR = 0x26 | OPC_SPECIAL,
    OPC_NOR = 0x27 | OPC_SPECIAL,
    OPC_SLT = 0x2A | OPC_SPECIAL,
    OPC_SLTU = 0x2B | OPC_SPECIAL,
    OPC_DADD = 0x2C | OPC_SPECIAL,
    OPC_DADDU = 0x2D | OPC_SPECIAL,
    OPC_DSUB = 0x2E | OPC_SPECIAL,
    OPC_DSUBU = 0x2F | OPC_SPECIAL,
    /* Jumps */
    OPC_JR = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    /* Traps */
    OPC_TGE = 0x30 | OPC_SPECIAL,
    OPC_TGEU = 0x31 | OPC_SPECIAL,
    OPC_TLT = 0x32 | OPC_SPECIAL,
    OPC_TLTU = 0x33 | OPC_SPECIAL,
    OPC_TEQ = 0x34 | OPC_SPECIAL,
    OPC_TNE = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI = 0x10 | OPC_SPECIAL,
    OPC_MTHI = 0x11 | OPC_SPECIAL,
    OPC_MFLO = 0x12 | OPC_SPECIAL,
    OPC_MTLO = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ = 0x0A | OPC_SPECIAL,
    OPC_MOVN = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ = 0x37 | OPC_SPECIAL,

    OPC_MOVCI = 0x01 | OPC_SPECIAL,

    /* Special */
    OPC_PMON = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL = 0x0C | OPC_SPECIAL,
    OPC_BREAK = 0x0D | OPC_SPECIAL,
    OPC_SPIM = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* R6 Multiply and Divide instructions have the same opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op)   (MASK_SPECIAL(op) | (op & (0x7ff)))
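/*
 * Disambiguation sketch: legacy MULT/DIV encode 0 in bits 10..6,
 * whereas the R6 forms below put 2 (low/quotient result) or 3
 * (high/remainder result) there, so masking in those bits as well is
 * enough to tell the two encodings apart.
 */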
enum {
    R6_OPC_MUL = OPC_MULT | (2 << 6),
    R6_OPC_MUH = OPC_MULT | (3 << 6),
    R6_OPC_MULU = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU = OPC_MULTU | (3 << 6),
    R6_OPC_DIV = OPC_DIV | (2 << 6),
    R6_OPC_MOD = OPC_DIV | (3 << 6),
    R6_OPC_DIVU = OPC_DIVU | (2 << 6),
    R6_OPC_MODU = OPC_DIVU | (3 << 6),

    R6_OPC_DMUL = OPC_DMULT | (2 << 6),
    R6_OPC_DMUH = OPC_DMULT | (3 << 6),
    R6_OPC_DMULU = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV = OPC_DDIV | (2 << 6),
    R6_OPC_DMOD = OPC_DDIV | (3 << 6),
    R6_OPC_DDIVU = OPC_DDIVU | (2 << 6),
    R6_OPC_DMODU = OPC_DDIVU | (3 << 6),

    R6_OPC_CLZ = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP = 0x0e | OPC_SPECIAL,

    OPC_LSA = 0x05 | OPC_SPECIAL,
    OPC_DLSA = 0x15 | OPC_SPECIAL,
};
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI = (0x1e << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD = 0x00 | OPC_SPECIAL2,
    OPC_MADDU = 0x01 | OPC_SPECIAL2,
    OPC_MUL = 0x02 | OPC_SPECIAL2,
    OPC_MSUB = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU = 0x05 | OPC_SPECIAL2,
    /* Loongson 2F */
    OPC_MULT_G_2F = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F = 0x1f | OPC_SPECIAL2,
    /* Misc */
    OPC_CLZ = 0x20 | OPC_SPECIAL2,
    OPC_CLO = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ = 0x24 | OPC_SPECIAL2,
    OPC_DCLO = 0x25 | OPC_SPECIAL2,
    /* Special */
    OPC_SDBBP = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU = 0x02 | OPC_SPECIAL3,
    OPC_DEXT = 0x03 | OPC_SPECIAL3,
    OPC_INS = 0x04 | OPC_SPECIAL3,
    OPC_DINSM = 0x05 | OPC_SPECIAL3,
    OPC_DINSU = 0x06 | OPC_SPECIAL3,
    OPC_DINS = 0x07 | OPC_SPECIAL3,
    OPC_FORK = 0x08 | OPC_SPECIAL3,
    OPC_YIELD = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR = 0x3B | OPC_SPECIAL3,

    /* Loongson 2E */
    OPC_MULT_G_2E = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E = 0x27 | OPC_SPECIAL3,

    /* MIPS DSP Load */
    OPC_LX_DSP = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP = 0x3C | OPC_SPECIAL3,

    /* R6 */
    R6_OPC_PREF = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD = 0x27 | OPC_SPECIAL3,
};
/* BSHFL opcodes */
#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};
/* DBSHFL opcodes */
#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};
/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Load */
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX = (0x08 << 6) | OPC_LX_DSP,
};
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH = (0x1E << 6) | OPC_ADDU_QB_DSP,
};
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W = (0x17 << 6) | OPC_SHLL_QB_DSP,
};
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};
#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN = (0x10 << 6) | OPC_APPEND_DSP,
};
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP = (0x12 << 6) | OPC_EXTR_W_DSP,
};
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB = (0x1A << 6) | OPC_ADDU_OB_DSP,
};
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN = (0x10 << 6) | OPC_DAPPEND_DSP,
};
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV = (0x1B << 6) | OPC_DEXTR_W_DSP,
};
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};
#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH = (0x19 << 6) | OPC_SHLL_OB_DSP,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0 = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0 = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0 = (0x02 << 21) | OPC_CP0,
    OPC_MTC0 = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0 = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0 = (0x06 << 21) | OPC_CP0,
    OPC_MFTR = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0 = (0x0B << 21) | OPC_CP0,
    OPC_MTTR = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR = (0x0E << 21) | OPC_CP0,
    OPC_C0 = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST = (0x1F << 21) | OPC_CP0,
};
/* MFMC0 opcodes */
#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_DVP = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
    OPC_EVP = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR = 0x01 | OPC_C0,
    OPC_TLBWI = 0x02 | OPC_C0,
    OPC_TLBINV = 0x03 | OPC_C0,
    OPC_TLBINVF = 0x04 | OPC_C0,
    OPC_TLBWR = 0x06 | OPC_C0,
    OPC_TLBP = 0x08 | OPC_C0,
    OPC_RFE = 0x10 | OPC_C0,
    OPC_ERET = 0x18 | OPC_C0,
    OPC_DERET = 0x1F | OPC_C0,
    OPC_WAIT = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S = 16,  /* single fp */
    FMT_D = 17,  /* double fp */
    FMT_E = 18,  /* extended fp */
    FMT_Q = 19,  /* quad fp */
    FMT_W = 20,  /* 32-bit fixed */
    FMT_L = 21,  /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};
enum {
    OPC_MFC1 = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1 = (0x01 << 21) | OPC_CP1,
    OPC_CFC1 = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1 = (0x03 << 21) | OPC_CP1,
    OPC_MTC1 = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1 = (0x05 << 21) | OPC_CP1,
    OPC_CTC1 = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1 = (0x07 << 21) | OPC_CP1,
    OPC_BC1 = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D = (0x1F << 21) | OPC_CP1,
};
#define MASK_CP1_FUNC(op)  MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)       MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F = (0x00 << 16) | OPC_BC1,
    OPC_BC1T = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL = (0x03 << 16) | OPC_BC1,
};
enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2 = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2 = (0x01 << 21) | OPC_CP2,
    OPC_CFC2 = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2 = (0x03 << 21) | OPC_CP2,
    OPC_MTC2 = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2 = (0x05 << 21) | OPC_CP2,
    OPC_CTC2 = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2 = (0x07 << 21) | OPC_CP2,
    OPC_BC2 = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ = (0x0D << 21) | OPC_CP2,
};
#define MASK_LMI(op)  (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2 = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2 = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2 = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0 = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1 = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2 = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3 = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2 = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2 = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2 = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2 = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2 = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2 = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2 = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2 = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2 = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2 = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2 = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2 = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2 = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2 = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2 = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2 = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2 = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2 = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2 = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB = (29 << 21) | (0x0F) | OPC_CP2,
};
#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1 = 0x00 | OPC_CP3,
    OPC_LDXC1 = 0x01 | OPC_CP3,
    OPC_LUXC1 = 0x05 | OPC_CP3,
    OPC_SWXC1 = 0x08 | OPC_CP3,
    OPC_SDXC1 = 0x09 | OPC_CP3,
    OPC_SUXC1 = 0x0D | OPC_CP3,
    OPC_PREFX = 0x0F | OPC_CP3,
    OPC_ALNV_PS = 0x1E | OPC_CP3,
    OPC_MADD_S = 0x20 | OPC_CP3,
    OPC_MADD_D = 0x21 | OPC_CP3,
    OPC_MADD_PS = 0x26 | OPC_CP3,
    OPC_MSUB_S = 0x28 | OPC_CP3,
    OPC_MSUB_D = 0x29 | OPC_CP3,
    OPC_MSUB_PS = 0x2E | OPC_CP3,
    OPC_NMADD_S = 0x30 | OPC_CP3,
    OPC_NMADD_D = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S = 0x38 | OPC_CP3,
    OPC_NMSUB_D = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
#define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
enum {
    OPC_MSA_I8_00 = 0x00 | OPC_MSA,
    OPC_MSA_I8_01 = 0x01 | OPC_MSA,
    OPC_MSA_I8_02 = 0x02 | OPC_MSA,
    OPC_MSA_I5_06 = 0x06 | OPC_MSA,
    OPC_MSA_I5_07 = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09 = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F = 0x0F | OPC_MSA,
    OPC_MSA_3R_10 = 0x10 | OPC_MSA,
    OPC_MSA_3R_11 = 0x11 | OPC_MSA,
    OPC_MSA_3R_12 = 0x12 | OPC_MSA,
    OPC_MSA_3R_13 = 0x13 | OPC_MSA,
    OPC_MSA_3R_14 = 0x14 | OPC_MSA,
    OPC_MSA_3R_15 = 0x15 | OPC_MSA,
    OPC_MSA_ELM = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C = 0x1C | OPC_MSA,
    OPC_MSA_VEC = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B = (0x20) | OPC_MSA,
    OPC_LD_H = (0x21) | OPC_MSA,
    OPC_LD_W = (0x22) | OPC_MSA,
    OPC_LD_D = (0x23) | OPC_MSA,
    OPC_ST_B = (0x24) | OPC_MSA,
    OPC_ST_H = (0x25) | OPC_MSA,
    OPC_ST_W = (0x26) | OPC_MSA,
    OPC_ST_D = (0x27) | OPC_MSA,
};

enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df = (0x7 << 23) | OPC_MSA_BIT_09,
};
/* global register indices */
static TCGv_env cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];

#include "exec/gen-icount.h"
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)
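/*
 * These wrappers materialise a constant immediate as a temporary
 * TCGv_i32, pass it (together with cpu_env and any TCG operands) to
 * the named helper, and free the temporary again.  The suffix roughly
 * encodes the signature: e.g. "0e1i" means no return value, an env
 * argument, one TCG operand and one immediate.  A typical use is
 * gen_helper_0e0i(raise_exception, excp), which ends up calling
 * gen_helper_raise_exception(cpu_env, tmp) with tmp holding excp.
 */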
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    int mem_idx;
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
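
/* Illustrative sketch (added, not in the original source): translation
 * routines typically pair gen_load_gpr/gen_store_gpr around a TCG
 * temporary, so reads of $zero yield 0 and writes to $zero are dropped.
 * The helper name below is hypothetical. */
static inline void gen_example_move_gpr(int rd, int rs)
{
    TCGv t0 = tcg_temp_new();

    gen_load_gpr(t0, rs);   /* loads the constant 0 when rs is register 0 */
    gen_store_gpr(t0, rd);  /* becomes a NOP when rd is register 0 */
    tcg_temp_free(t0);
}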
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}
static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
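
/* Descriptive note (added): both shadow-register helpers compute the byte
 * offset of the previous shadow set as SRSCtl.PSS * 32 * sizeof(target_ulong),
 * add it to cpu_env, and then index the selected register inside that
 * 32-entry bank, so SRSCtl decides which GPR bank the RDPGPR/WRPGPR style
 * moves operate on. */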
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
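
/* Illustrative note (added): translation code raises guest exceptions
 * through these wrappers, e.g.
 *
 *     generate_exception_end(ctx, EXCP_RI);
 *
 * which saves PC/hflags, emits the raise_exception_err helper call and
 * marks the block as ending in an exception (BS_EXCP), so translation of
 * the current TB stops there. */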
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
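
/* Descriptive note (added): with Status.FR = 0 (no MIPS_HFLAG_F64) a 64-bit
 * FP value is split across an even/odd register pair, so e.g.
 * gen_load_fpr64(ctx, t, 2) concatenates the low 32 bits of fpu_f64[2] and
 * fpu_f64[3], and gen_store_fpr64 deposits the two halves back into that
 * pair; with FR = 1 each fpu_f64[reg] holds the full 64 bits. */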
static inline int get_fp_bit (int cc)
{
    /* FCSR condition code 0 lives in bit 23, codes 1..7 in bits 25..31. */
    if (cc)
        return 24 + cc;
    else
        return 23;
}

/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}

/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables.  */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled.  */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit.
 */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has corresponding flag set which indicates that the instruction
   has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support 64-bit paired-single (PS) floating point data type */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}

#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
1873 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1874 calling interface for 32 and 64-bit FPRs. No sense in changing
1875 all callers for gen_load_fpr32 when we need the CTX parameter for
1877 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1878 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1879 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1880 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1881 int ft, int fs, int cc) \
1883 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1884 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1893 check_cp1_registers(ctx, fs | ft); \
1901 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1902 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1904 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1905 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1906 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1907 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1908 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1909 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1910 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1911 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1912 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1913 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1914 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1915 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1916 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1917 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1918 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1919 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1922 tcg_temp_free_i##bits (fp0); \
1923 tcg_temp_free_i##bits (fp1); \
1926 FOP_CONDS(, 0, d
, FMT_D
, 64)
1927 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1928 FOP_CONDS(, 0, s
, FMT_S
, 32)
1929 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1930 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1931 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1934 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1935 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1936 int ft, int fs, int fd) \
1938 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1939 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1940 if (ifmt == FMT_D) { \
1941 check_cp1_registers(ctx, fs | ft | fd); \
1943 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1944 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1947 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1950 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1953 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1956 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1959 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1962 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1965 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1968 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1971 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1974 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1977 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1980 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1983 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1986 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1989 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1992 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1995 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
1998 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
2001 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2004 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2007 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2010 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2016 tcg_temp_free_i ## bits (fp0); \
2017 tcg_temp_free_i ## bits (fp1); \
2020 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2021 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2023 #undef gen_ldcmp_fpr32
2024 #undef gen_ldcmp_fpr64
2026 /* load/store instructions. */
2027 #ifdef CONFIG_USER_ONLY
2028 #define OP_LD_ATOMIC(insn,fname) \
2029 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2031 TCGv t0 = tcg_temp_new(); \
2032 tcg_gen_mov_tl(t0, arg1); \
2033 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2034 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2035 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2036 tcg_temp_free(t0); \
2039 #define OP_LD_ATOMIC(insn,fname) \
2040 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2042 gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx); \
2045 OP_LD_ATOMIC(ll
,ld32s
);
2046 #if defined(TARGET_MIPS64)
2047 OP_LD_ATOMIC(lld
,ld64
);
2051 #ifdef CONFIG_USER_ONLY
2052 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2053 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2055 TCGv t0 = tcg_temp_new(); \
2056 TCGLabel *l1 = gen_new_label(); \
2057 TCGLabel *l2 = gen_new_label(); \
2059 tcg_gen_andi_tl(t0, arg2, almask); \
2060 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2061 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2062 generate_exception(ctx, EXCP_AdES); \
2063 gen_set_label(l1); \
2064 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2065 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2066 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2067 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2068 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2069 generate_exception_end(ctx, EXCP_SC); \
2070 gen_set_label(l2); \
2071 tcg_gen_movi_tl(t0, 0); \
2072 gen_store_gpr(t0, rt); \
2073 tcg_temp_free(t0); \
2076 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2077 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2079 TCGv t0 = tcg_temp_new(); \
2080 gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx); \
2081 gen_store_gpr(t0, rt); \
2082 tcg_temp_free(t0); \
2085 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
2086 #if defined(TARGET_MIPS64)
2087 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
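
/* Illustrative note (added): a load such as "lw rt, offset(base)" first
 * builds its effective address with this helper, e.g.
 *
 *     TCGv t0 = tcg_temp_new();
 *     gen_base_offset_addr(ctx, t0, base, offset);
 *
 * which emits either a constant (base == 0), a plain GPR read
 * (offset == 0), or GPR + sign-extended 16-bit offset via
 * gen_op_addr_add(), so 32-bit address wrap-around is honoured on
 * TARGET_MIPS64 when MIPS_HFLAG_AWRAP is set. */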
2104 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
2106 target_ulong pc
= ctx
->pc
;
2108 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2109 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2114 pc
&= ~(target_ulong
)3;
2119 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2120 int rt
, int base
, int16_t offset
)
2124 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2125 /* Loongson CPU uses a load to zero register for prefetch.
2126 We emulate it as a NOP. On other CPU we must perform the
2127 actual memory access. */
2131 t0
= tcg_temp_new();
2132 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2135 #if defined(TARGET_MIPS64)
2137 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2138 ctx
->default_tcg_memop_mask
);
2139 gen_store_gpr(t0
, rt
);
2142 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2143 ctx
->default_tcg_memop_mask
);
2144 gen_store_gpr(t0
, rt
);
2148 op_ld_lld(t0
, t0
, ctx
);
2149 gen_store_gpr(t0
, rt
);
2152 t1
= tcg_temp_new();
2153 /* Do a byte access to possibly trigger a page
2154 fault with the unaligned address. */
2155 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2156 tcg_gen_andi_tl(t1
, t0
, 7);
2157 #ifndef TARGET_WORDS_BIGENDIAN
2158 tcg_gen_xori_tl(t1
, t1
, 7);
2160 tcg_gen_shli_tl(t1
, t1
, 3);
2161 tcg_gen_andi_tl(t0
, t0
, ~7);
2162 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2163 tcg_gen_shl_tl(t0
, t0
, t1
);
2164 t2
= tcg_const_tl(-1);
2165 tcg_gen_shl_tl(t2
, t2
, t1
);
2166 gen_load_gpr(t1
, rt
);
2167 tcg_gen_andc_tl(t1
, t1
, t2
);
2169 tcg_gen_or_tl(t0
, t0
, t1
);
2171 gen_store_gpr(t0
, rt
);
2174 t1
= tcg_temp_new();
2175 /* Do a byte access to possibly trigger a page
2176 fault with the unaligned address. */
2177 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2178 tcg_gen_andi_tl(t1
, t0
, 7);
2179 #ifdef TARGET_WORDS_BIGENDIAN
2180 tcg_gen_xori_tl(t1
, t1
, 7);
2182 tcg_gen_shli_tl(t1
, t1
, 3);
2183 tcg_gen_andi_tl(t0
, t0
, ~7);
2184 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2185 tcg_gen_shr_tl(t0
, t0
, t1
);
2186 tcg_gen_xori_tl(t1
, t1
, 63);
2187 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2188 tcg_gen_shl_tl(t2
, t2
, t1
);
2189 gen_load_gpr(t1
, rt
);
2190 tcg_gen_and_tl(t1
, t1
, t2
);
2192 tcg_gen_or_tl(t0
, t0
, t1
);
2194 gen_store_gpr(t0
, rt
);
2197 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2198 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2200 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2201 gen_store_gpr(t0
, rt
);
2205 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2206 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2208 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2209 gen_store_gpr(t0
, rt
);
2212 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2213 ctx
->default_tcg_memop_mask
);
2214 gen_store_gpr(t0
, rt
);
2217 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2218 ctx
->default_tcg_memop_mask
);
2219 gen_store_gpr(t0
, rt
);
2222 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2223 ctx
->default_tcg_memop_mask
);
2224 gen_store_gpr(t0
, rt
);
2227 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2228 gen_store_gpr(t0
, rt
);
2231 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2232 gen_store_gpr(t0
, rt
);
2235 t1
= tcg_temp_new();
2236 /* Do a byte access to possibly trigger a page
2237 fault with the unaligned address. */
2238 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2239 tcg_gen_andi_tl(t1
, t0
, 3);
2240 #ifndef TARGET_WORDS_BIGENDIAN
2241 tcg_gen_xori_tl(t1
, t1
, 3);
2243 tcg_gen_shli_tl(t1
, t1
, 3);
2244 tcg_gen_andi_tl(t0
, t0
, ~3);
2245 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2246 tcg_gen_shl_tl(t0
, t0
, t1
);
2247 t2
= tcg_const_tl(-1);
2248 tcg_gen_shl_tl(t2
, t2
, t1
);
2249 gen_load_gpr(t1
, rt
);
2250 tcg_gen_andc_tl(t1
, t1
, t2
);
2252 tcg_gen_or_tl(t0
, t0
, t1
);
2254 tcg_gen_ext32s_tl(t0
, t0
);
2255 gen_store_gpr(t0
, rt
);
2258 t1
= tcg_temp_new();
2259 /* Do a byte access to possibly trigger a page
2260 fault with the unaligned address. */
2261 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2262 tcg_gen_andi_tl(t1
, t0
, 3);
2263 #ifdef TARGET_WORDS_BIGENDIAN
2264 tcg_gen_xori_tl(t1
, t1
, 3);
2266 tcg_gen_shli_tl(t1
, t1
, 3);
2267 tcg_gen_andi_tl(t0
, t0
, ~3);
2268 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2269 tcg_gen_shr_tl(t0
, t0
, t1
);
2270 tcg_gen_xori_tl(t1
, t1
, 31);
2271 t2
= tcg_const_tl(0xfffffffeull
);
2272 tcg_gen_shl_tl(t2
, t2
, t1
);
2273 gen_load_gpr(t1
, rt
);
2274 tcg_gen_and_tl(t1
, t1
, t2
);
2276 tcg_gen_or_tl(t0
, t0
, t1
);
2278 tcg_gen_ext32s_tl(t0
, t0
);
2279 gen_store_gpr(t0
, rt
);
2283 op_ld_ll(t0
, t0
, ctx
);
2284 gen_store_gpr(t0
, rt
);
2291 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2292 int base
, int16_t offset
)
2294 TCGv t0
= tcg_temp_new();
2295 TCGv t1
= tcg_temp_new();
2297 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2298 gen_load_gpr(t1
, rt
);
2300 #if defined(TARGET_MIPS64)
2302 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2303 ctx
->default_tcg_memop_mask
);
2306 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2309 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2313 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2314 ctx
->default_tcg_memop_mask
);
2317 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2318 ctx
->default_tcg_memop_mask
);
2321 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2324 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2327 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2335 /* Store conditional */
2336 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2337 int base
, int16_t offset
)
2341 #ifdef CONFIG_USER_ONLY
2342 t0
= tcg_temp_local_new();
2343 t1
= tcg_temp_local_new();
2345 t0
= tcg_temp_new();
2346 t1
= tcg_temp_new();
2348 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2349 gen_load_gpr(t1
, rt
);
2351 #if defined(TARGET_MIPS64)
2354 op_st_scd(t1
, t0
, rt
, ctx
);
2359 op_st_sc(t1
, t0
, rt
, ctx
);
2366 /* Load and store */
2367 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2368 int base
, int16_t offset
)
2370 TCGv t0
= tcg_temp_new();
2372 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2373 /* Don't do NOP if destination is zero: we must perform the actual
2378 TCGv_i32 fp0
= tcg_temp_new_i32();
2379 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2380 ctx
->default_tcg_memop_mask
);
2381 gen_store_fpr32(ctx
, fp0
, ft
);
2382 tcg_temp_free_i32(fp0
);
2387 TCGv_i32 fp0
= tcg_temp_new_i32();
2388 gen_load_fpr32(ctx
, fp0
, ft
);
2389 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2390 ctx
->default_tcg_memop_mask
);
2391 tcg_temp_free_i32(fp0
);
2396 TCGv_i64 fp0
= tcg_temp_new_i64();
2397 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2398 ctx
->default_tcg_memop_mask
);
2399 gen_store_fpr64(ctx
, fp0
, ft
);
2400 tcg_temp_free_i64(fp0
);
2405 TCGv_i64 fp0
= tcg_temp_new_i64();
2406 gen_load_fpr64(ctx
, fp0
, ft
);
2407 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2408 ctx
->default_tcg_memop_mask
);
2409 tcg_temp_free_i64(fp0
);
2413 MIPS_INVAL("flt_ldst");
2414 generate_exception_end(ctx
, EXCP_RI
);
2421 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2422 int rs
, int16_t imm
)
2424 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2425 check_cp1_enabled(ctx
);
2429 check_insn(ctx
, ISA_MIPS2
);
2432 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2435 generate_exception_err(ctx
, EXCP_CpU
, 1);
2439 /* Arithmetic with immediate operand */
2440 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2441 int rt
, int rs
, int16_t imm
)
2443 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2445 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2446 /* If no destination, treat it as a NOP.
2447 For addi, we must generate the overflow exception when needed. */
2453 TCGv t0
= tcg_temp_local_new();
2454 TCGv t1
= tcg_temp_new();
2455 TCGv t2
= tcg_temp_new();
2456 TCGLabel
*l1
= gen_new_label();
2458 gen_load_gpr(t1
, rs
);
2459 tcg_gen_addi_tl(t0
, t1
, uimm
);
2460 tcg_gen_ext32s_tl(t0
, t0
);
2462 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2463 tcg_gen_xori_tl(t2
, t0
, uimm
);
2464 tcg_gen_and_tl(t1
, t1
, t2
);
2466 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2468 /* operands of same sign, result different sign */
2469 generate_exception(ctx
, EXCP_OVERFLOW
);
2471 tcg_gen_ext32s_tl(t0
, t0
);
2472 gen_store_gpr(t0
, rt
);
2478 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2479 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2481 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2484 #if defined(TARGET_MIPS64)
2487 TCGv t0
= tcg_temp_local_new();
2488 TCGv t1
= tcg_temp_new();
2489 TCGv t2
= tcg_temp_new();
2490 TCGLabel
*l1
= gen_new_label();
2492 gen_load_gpr(t1
, rs
);
2493 tcg_gen_addi_tl(t0
, t1
, uimm
);
2495 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2496 tcg_gen_xori_tl(t2
, t0
, uimm
);
2497 tcg_gen_and_tl(t1
, t1
, t2
);
2499 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2501 /* operands of same sign, result different sign */
2502 generate_exception(ctx
, EXCP_OVERFLOW
);
2504 gen_store_gpr(t0
, rt
);
2510 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2512 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
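
/* Descriptive note (added): the ADDI/DADDI paths above detect signed
 * overflow without a conditional add.  With r = rs + imm, the value
 * (rs ^ ~imm) & (r ^ imm) has its sign bit set exactly when rs and imm
 * share a sign but r has the opposite sign, so the code branches past
 * generate_exception(ctx, EXCP_OVERFLOW) only when that expression is
 * non-negative. */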
2519 /* Logic with immediate operand */
2520 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2521 int rt
, int rs
, int16_t imm
)
2526 /* If no destination, treat it as a NOP. */
2529 uimm
= (uint16_t)imm
;
2532 if (likely(rs
!= 0))
2533 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2535 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2539 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2541 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2544 if (likely(rs
!= 0))
2545 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2547 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2550 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2552 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2553 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2555 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2564 /* Set on less than with immediate operand */
2565 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2566 int rt
, int rs
, int16_t imm
)
2568 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2572 /* If no destination, treat it as a NOP. */
2575 t0
= tcg_temp_new();
2576 gen_load_gpr(t0
, rs
);
2579 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2582 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2588 /* Shifts with immediate operand */
2589 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2590 int rt
, int rs
, int16_t imm
)
2592 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2596 /* If no destination, treat it as a NOP. */
2600 t0
= tcg_temp_new();
2601 gen_load_gpr(t0
, rs
);
2604 tcg_gen_shli_tl(t0
, t0
, uimm
);
2605 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2608 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2612 tcg_gen_ext32u_tl(t0
, t0
);
2613 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2615 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2620 TCGv_i32 t1
= tcg_temp_new_i32();
2622 tcg_gen_trunc_tl_i32(t1
, t0
);
2623 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2624 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2625 tcg_temp_free_i32(t1
);
2627 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2630 #if defined(TARGET_MIPS64)
2632 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2635 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2638 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2642 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2644 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2648 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2651 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2654 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2657 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2665 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2666 int rd
, int rs
, int rt
)
2668 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2669 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2670 /* If no destination, treat it as a NOP.
2671 For add & sub, we must generate the overflow exception when needed. */
2678 TCGv t0
= tcg_temp_local_new();
2679 TCGv t1
= tcg_temp_new();
2680 TCGv t2
= tcg_temp_new();
2681 TCGLabel
*l1
= gen_new_label();
2683 gen_load_gpr(t1
, rs
);
2684 gen_load_gpr(t2
, rt
);
2685 tcg_gen_add_tl(t0
, t1
, t2
);
2686 tcg_gen_ext32s_tl(t0
, t0
);
2687 tcg_gen_xor_tl(t1
, t1
, t2
);
2688 tcg_gen_xor_tl(t2
, t0
, t2
);
2689 tcg_gen_andc_tl(t1
, t2
, t1
);
2691 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2693 /* operands of same sign, result different sign */
2694 generate_exception(ctx
, EXCP_OVERFLOW
);
2696 gen_store_gpr(t0
, rd
);
2701 if (rs
!= 0 && rt
!= 0) {
2702 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2703 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2704 } else if (rs
== 0 && rt
!= 0) {
2705 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2706 } else if (rs
!= 0 && rt
== 0) {
2707 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2709 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2714 TCGv t0
= tcg_temp_local_new();
2715 TCGv t1
= tcg_temp_new();
2716 TCGv t2
= tcg_temp_new();
2717 TCGLabel
*l1
= gen_new_label();
2719 gen_load_gpr(t1
, rs
);
2720 gen_load_gpr(t2
, rt
);
2721 tcg_gen_sub_tl(t0
, t1
, t2
);
2722 tcg_gen_ext32s_tl(t0
, t0
);
2723 tcg_gen_xor_tl(t2
, t1
, t2
);
2724 tcg_gen_xor_tl(t1
, t0
, t1
);
2725 tcg_gen_and_tl(t1
, t1
, t2
);
2727 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2729 /* operands of different sign, first operand and result different sign */
2730 generate_exception(ctx
, EXCP_OVERFLOW
);
2732 gen_store_gpr(t0
, rd
);
2737 if (rs
!= 0 && rt
!= 0) {
2738 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2739 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2740 } else if (rs
== 0 && rt
!= 0) {
2741 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2742 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2743 } else if (rs
!= 0 && rt
== 0) {
2744 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2746 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2749 #if defined(TARGET_MIPS64)
2752 TCGv t0
= tcg_temp_local_new();
2753 TCGv t1
= tcg_temp_new();
2754 TCGv t2
= tcg_temp_new();
2755 TCGLabel
*l1
= gen_new_label();
2757 gen_load_gpr(t1
, rs
);
2758 gen_load_gpr(t2
, rt
);
2759 tcg_gen_add_tl(t0
, t1
, t2
);
2760 tcg_gen_xor_tl(t1
, t1
, t2
);
2761 tcg_gen_xor_tl(t2
, t0
, t2
);
2762 tcg_gen_andc_tl(t1
, t2
, t1
);
2764 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2766 /* operands of same sign, result different sign */
2767 generate_exception(ctx
, EXCP_OVERFLOW
);
2769 gen_store_gpr(t0
, rd
);
2774 if (rs
!= 0 && rt
!= 0) {
2775 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2776 } else if (rs
== 0 && rt
!= 0) {
2777 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2778 } else if (rs
!= 0 && rt
== 0) {
2779 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2781 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2786 TCGv t0
= tcg_temp_local_new();
2787 TCGv t1
= tcg_temp_new();
2788 TCGv t2
= tcg_temp_new();
2789 TCGLabel
*l1
= gen_new_label();
2791 gen_load_gpr(t1
, rs
);
2792 gen_load_gpr(t2
, rt
);
2793 tcg_gen_sub_tl(t0
, t1
, t2
);
2794 tcg_gen_xor_tl(t2
, t1
, t2
);
2795 tcg_gen_xor_tl(t1
, t0
, t1
);
2796 tcg_gen_and_tl(t1
, t1
, t2
);
2798 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2800 /* operands of different sign, first operand and result different sign */
2801 generate_exception(ctx
, EXCP_OVERFLOW
);
2803 gen_store_gpr(t0
, rd
);
2808 if (rs
!= 0 && rt
!= 0) {
2809 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2810 } else if (rs
== 0 && rt
!= 0) {
2811 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2812 } else if (rs
!= 0 && rt
== 0) {
2813 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2815 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2820 if (likely(rs
!= 0 && rt
!= 0)) {
2821 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2822 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2824 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2830 /* Conditional move */
2831 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2832 int rd
, int rs
, int rt
)
2837 /* If no destination, treat it as a NOP. */
2841 t0
= tcg_temp_new();
2842 gen_load_gpr(t0
, rt
);
2843 t1
= tcg_const_tl(0);
2844 t2
= tcg_temp_new();
2845 gen_load_gpr(t2
, rs
);
2848 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2851 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2854 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2857 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2866 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2867 int rd
, int rs
, int rt
)
2870 /* If no destination, treat it as a NOP. */
2876 if (likely(rs
!= 0 && rt
!= 0)) {
2877 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2879 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2883 if (rs
!= 0 && rt
!= 0) {
2884 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2885 } else if (rs
== 0 && rt
!= 0) {
2886 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2887 } else if (rs
!= 0 && rt
== 0) {
2888 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2890 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2894 if (likely(rs
!= 0 && rt
!= 0)) {
2895 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2896 } else if (rs
== 0 && rt
!= 0) {
2897 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2898 } else if (rs
!= 0 && rt
== 0) {
2899 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2901 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2905 if (likely(rs
!= 0 && rt
!= 0)) {
2906 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2907 } else if (rs
== 0 && rt
!= 0) {
2908 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2909 } else if (rs
!= 0 && rt
== 0) {
2910 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2912 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2918 /* Set on lower than */
2919 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2920 int rd
, int rs
, int rt
)
2925 /* If no destination, treat it as a NOP. */
2929 t0
= tcg_temp_new();
2930 t1
= tcg_temp_new();
2931 gen_load_gpr(t0
, rs
);
2932 gen_load_gpr(t1
, rt
);
2935 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2938 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2946 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2947 int rd
, int rs
, int rt
)
2952 /* If no destination, treat it as a NOP.
2953 For add & sub, we must generate the overflow exception when needed. */
2957 t0
= tcg_temp_new();
2958 t1
= tcg_temp_new();
2959 gen_load_gpr(t0
, rs
);
2960 gen_load_gpr(t1
, rt
);
2963 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2964 tcg_gen_shl_tl(t0
, t1
, t0
);
2965 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2968 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2969 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2972 tcg_gen_ext32u_tl(t1
, t1
);
2973 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2974 tcg_gen_shr_tl(t0
, t1
, t0
);
2975 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2979 TCGv_i32 t2
= tcg_temp_new_i32();
2980 TCGv_i32 t3
= tcg_temp_new_i32();
2982 tcg_gen_trunc_tl_i32(t2
, t0
);
2983 tcg_gen_trunc_tl_i32(t3
, t1
);
2984 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2985 tcg_gen_rotr_i32(t2
, t3
, t2
);
2986 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2987 tcg_temp_free_i32(t2
);
2988 tcg_temp_free_i32(t3
);
2991 #if defined(TARGET_MIPS64)
2993 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2994 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2997 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2998 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3001 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3002 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3005 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3006 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3014 /* Arithmetic on HI/LO registers */
3015 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3017 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3028 #if defined(TARGET_MIPS64)
3030 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3034 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3038 #if defined(TARGET_MIPS64)
3040 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3044 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3049 #if defined(TARGET_MIPS64)
3051 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3055 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3058 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3063 #if defined(TARGET_MIPS64)
3065 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3069 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3072 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3078 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3081 TCGv t0
= tcg_const_tl(addr
);
3082 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3083 gen_store_gpr(t0
, reg
);
3087 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3093 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3096 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3097 addr
= addr_add(ctx
, pc
, offset
);
3098 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3102 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3103 addr
= addr_add(ctx
, pc
, offset
);
3104 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3106 #if defined(TARGET_MIPS64)
3109 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3110 addr
= addr_add(ctx
, pc
, offset
);
3111 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3115 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3118 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3119 addr
= addr_add(ctx
, pc
, offset
);
3120 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3125 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3126 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3127 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3130 #if defined(TARGET_MIPS64)
3131 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3132 case R6_OPC_LDPC
+ (1 << 16):
3133 case R6_OPC_LDPC
+ (2 << 16):
3134 case R6_OPC_LDPC
+ (3 << 16):
3136 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3137 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3138 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3142 MIPS_INVAL("OPC_PCREL");
3143 generate_exception_end(ctx
, EXCP_RI
);
3150 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3159 t0
= tcg_temp_new();
3160 t1
= tcg_temp_new();
3162 gen_load_gpr(t0
, rs
);
3163 gen_load_gpr(t1
, rt
);
3168 TCGv t2
= tcg_temp_new();
3169 TCGv t3
= tcg_temp_new();
3170 tcg_gen_ext32s_tl(t0
, t0
);
3171 tcg_gen_ext32s_tl(t1
, t1
);
3172 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3173 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3174 tcg_gen_and_tl(t2
, t2
, t3
);
3175 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3176 tcg_gen_or_tl(t2
, t2
, t3
);
3177 tcg_gen_movi_tl(t3
, 0);
3178 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3179 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3180 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3187 TCGv t2
= tcg_temp_new();
3188 TCGv t3
= tcg_temp_new();
3189 tcg_gen_ext32s_tl(t0
, t0
);
3190 tcg_gen_ext32s_tl(t1
, t1
);
3191 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3192 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3193 tcg_gen_and_tl(t2
, t2
, t3
);
3194 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3195 tcg_gen_or_tl(t2
, t2
, t3
);
3196 tcg_gen_movi_tl(t3
, 0);
3197 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3198 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3199 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3206 TCGv t2
= tcg_const_tl(0);
3207 TCGv t3
= tcg_const_tl(1);
3208 tcg_gen_ext32u_tl(t0
, t0
);
3209 tcg_gen_ext32u_tl(t1
, t1
);
3210 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3211 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3212 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3219 TCGv t2
= tcg_const_tl(0);
3220 TCGv t3
= tcg_const_tl(1);
3221 tcg_gen_ext32u_tl(t0
, t0
);
3222 tcg_gen_ext32u_tl(t1
, t1
);
3223 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3224 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3225 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3232 TCGv_i32 t2
= tcg_temp_new_i32();
3233 TCGv_i32 t3
= tcg_temp_new_i32();
3234 tcg_gen_trunc_tl_i32(t2
, t0
);
3235 tcg_gen_trunc_tl_i32(t3
, t1
);
3236 tcg_gen_mul_i32(t2
, t2
, t3
);
3237 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3238 tcg_temp_free_i32(t2
);
3239 tcg_temp_free_i32(t3
);
3244 TCGv_i32 t2
= tcg_temp_new_i32();
3245 TCGv_i32 t3
= tcg_temp_new_i32();
3246 tcg_gen_trunc_tl_i32(t2
, t0
);
3247 tcg_gen_trunc_tl_i32(t3
, t1
);
3248 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3249 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3250 tcg_temp_free_i32(t2
);
3251 tcg_temp_free_i32(t3
);
3256 TCGv_i32 t2
= tcg_temp_new_i32();
3257 TCGv_i32 t3
= tcg_temp_new_i32();
3258 tcg_gen_trunc_tl_i32(t2
, t0
);
3259 tcg_gen_trunc_tl_i32(t3
, t1
);
3260 tcg_gen_mul_i32(t2
, t2
, t3
);
3261 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3262 tcg_temp_free_i32(t2
);
3263 tcg_temp_free_i32(t3
);
3268 TCGv_i32 t2
= tcg_temp_new_i32();
3269 TCGv_i32 t3
= tcg_temp_new_i32();
3270 tcg_gen_trunc_tl_i32(t2
, t0
);
3271 tcg_gen_trunc_tl_i32(t3
, t1
);
3272 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3273 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3274 tcg_temp_free_i32(t2
);
3275 tcg_temp_free_i32(t3
);
3278 #if defined(TARGET_MIPS64)
3281 TCGv t2
= tcg_temp_new();
3282 TCGv t3
= tcg_temp_new();
3283 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3284 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3285 tcg_gen_and_tl(t2
, t2
, t3
);
3286 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3287 tcg_gen_or_tl(t2
, t2
, t3
);
3288 tcg_gen_movi_tl(t3
, 0);
3289 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3290 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3297 TCGv t2
= tcg_temp_new();
3298 TCGv t3
= tcg_temp_new();
3299 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3300 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3301 tcg_gen_and_tl(t2
, t2
, t3
);
3302 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3303 tcg_gen_or_tl(t2
, t2
, t3
);
3304 tcg_gen_movi_tl(t3
, 0);
3305 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3306 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3313 TCGv t2
= tcg_const_tl(0);
3314 TCGv t3
= tcg_const_tl(1);
3315 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3316 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3323 TCGv t2
= tcg_const_tl(0);
3324 TCGv t3
= tcg_const_tl(1);
3325 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3326 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3332 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3336 TCGv t2
= tcg_temp_new();
3337 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3342 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3346 TCGv t2
= tcg_temp_new();
3347 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3353 MIPS_INVAL("r6 mul/div");
3354 generate_exception_end(ctx
, EXCP_RI
);
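
/* Descriptive note (added): the R6 DIV/MOD/DIVU/MODU cases above never emit
 * a host division that could trap.  A setcond/movcond sequence first
 * replaces a divisor of 0 (and, for the signed forms, the INT_MIN / -1
 * pair) with a harmless value before tcg_gen_div/rem is emitted; the
 * architectural result for those inputs is UNPREDICTABLE, so any value
 * written back to rd is acceptable. */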
3362 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3363 int acc
, int rs
, int rt
)
3367 t0
= tcg_temp_new();
3368 t1
= tcg_temp_new();
3370 gen_load_gpr(t0
, rs
);
3371 gen_load_gpr(t1
, rt
);
3380 TCGv t2
= tcg_temp_new();
3381 TCGv t3
= tcg_temp_new();
3382 tcg_gen_ext32s_tl(t0
, t0
);
3383 tcg_gen_ext32s_tl(t1
, t1
);
3384 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3385 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3386 tcg_gen_and_tl(t2
, t2
, t3
);
3387 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3388 tcg_gen_or_tl(t2
, t2
, t3
);
3389 tcg_gen_movi_tl(t3
, 0);
3390 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3391 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3392 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3393 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3394 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3401 TCGv t2
= tcg_const_tl(0);
3402 TCGv t3
= tcg_const_tl(1);
3403 tcg_gen_ext32u_tl(t0
, t0
);
3404 tcg_gen_ext32u_tl(t1
, t1
);
3405 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3406 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3407 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3408 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3409 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3416 TCGv_i32 t2
= tcg_temp_new_i32();
3417 TCGv_i32 t3
= tcg_temp_new_i32();
3418 tcg_gen_trunc_tl_i32(t2
, t0
);
3419 tcg_gen_trunc_tl_i32(t3
, t1
);
3420 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3421 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3422 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3423 tcg_temp_free_i32(t2
);
3424 tcg_temp_free_i32(t3
);
3429 TCGv_i32 t2
= tcg_temp_new_i32();
3430 TCGv_i32 t3
= tcg_temp_new_i32();
3431 tcg_gen_trunc_tl_i32(t2
, t0
);
3432 tcg_gen_trunc_tl_i32(t3
, t1
);
3433 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3434 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3435 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3436 tcg_temp_free_i32(t2
);
3437 tcg_temp_free_i32(t3
);
3440 #if defined(TARGET_MIPS64)
3443 TCGv t2
= tcg_temp_new();
3444 TCGv t3
= tcg_temp_new();
3445 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3446 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3447 tcg_gen_and_tl(t2
, t2
, t3
);
3448 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3449 tcg_gen_or_tl(t2
, t2
, t3
);
3450 tcg_gen_movi_tl(t3
, 0);
3451 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3452 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3453 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3460 TCGv t2
= tcg_const_tl(0);
3461 TCGv t3
= tcg_const_tl(1);
3462 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3463 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3464 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3470 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3473 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3478 TCGv_i64 t2
= tcg_temp_new_i64();
3479 TCGv_i64 t3
= tcg_temp_new_i64();
3481 tcg_gen_ext_tl_i64(t2
, t0
);
3482 tcg_gen_ext_tl_i64(t3
, t1
);
3483 tcg_gen_mul_i64(t2
, t2
, t3
);
3484 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3485 tcg_gen_add_i64(t2
, t2
, t3
);
3486 tcg_temp_free_i64(t3
);
3487 gen_move_low32(cpu_LO
[acc
], t2
);
3488 gen_move_high32(cpu_HI
[acc
], t2
);
3489 tcg_temp_free_i64(t2
);
3494 TCGv_i64 t2
= tcg_temp_new_i64();
3495 TCGv_i64 t3
= tcg_temp_new_i64();
3497 tcg_gen_ext32u_tl(t0
, t0
);
3498 tcg_gen_ext32u_tl(t1
, t1
);
3499 tcg_gen_extu_tl_i64(t2
, t0
);
3500 tcg_gen_extu_tl_i64(t3
, t1
);
3501 tcg_gen_mul_i64(t2
, t2
, t3
);
3502 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3503 tcg_gen_add_i64(t2
, t2
, t3
);
3504 tcg_temp_free_i64(t3
);
3505 gen_move_low32(cpu_LO
[acc
], t2
);
3506 gen_move_high32(cpu_HI
[acc
], t2
);
3507 tcg_temp_free_i64(t2
);
3512 TCGv_i64 t2
= tcg_temp_new_i64();
3513 TCGv_i64 t3
= tcg_temp_new_i64();
3515 tcg_gen_ext_tl_i64(t2
, t0
);
3516 tcg_gen_ext_tl_i64(t3
, t1
);
3517 tcg_gen_mul_i64(t2
, t2
, t3
);
3518 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3519 tcg_gen_sub_i64(t2
, t3
, t2
);
3520 tcg_temp_free_i64(t3
);
3521 gen_move_low32(cpu_LO
[acc
], t2
);
3522 gen_move_high32(cpu_HI
[acc
], t2
);
3523 tcg_temp_free_i64(t2
);
3528 TCGv_i64 t2
= tcg_temp_new_i64();
3529 TCGv_i64 t3
= tcg_temp_new_i64();
3531 tcg_gen_ext32u_tl(t0
, t0
);
3532 tcg_gen_ext32u_tl(t1
, t1
);
3533 tcg_gen_extu_tl_i64(t2
, t0
);
3534 tcg_gen_extu_tl_i64(t3
, t1
);
3535 tcg_gen_mul_i64(t2
, t2
, t3
);
3536 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3537 tcg_gen_sub_i64(t2
, t3
, t2
);
3538 tcg_temp_free_i64(t3
);
3539 gen_move_low32(cpu_LO
[acc
], t2
);
3540 gen_move_high32(cpu_HI
[acc
], t2
);
3541 tcg_temp_free_i64(t2
);
3545 MIPS_INVAL("mul/div");
3546 generate_exception_end(ctx
, EXCP_RI
);
3554 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3555 int rd
, int rs
, int rt
)
3557 TCGv t0
= tcg_temp_new();
3558 TCGv t1
= tcg_temp_new();
3560 gen_load_gpr(t0
, rs
);
3561 gen_load_gpr(t1
, rt
);
3564 case OPC_VR54XX_MULS
:
3565 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3567 case OPC_VR54XX_MULSU
:
3568 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3570 case OPC_VR54XX_MACC
:
3571 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3573 case OPC_VR54XX_MACCU
:
3574 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3576 case OPC_VR54XX_MSAC
:
3577 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3579 case OPC_VR54XX_MSACU
:
3580 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3582 case OPC_VR54XX_MULHI
:
3583 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3585 case OPC_VR54XX_MULHIU
:
3586 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3588 case OPC_VR54XX_MULSHI
:
3589 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3591 case OPC_VR54XX_MULSHIU
:
3592 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3594 case OPC_VR54XX_MACCHI
:
3595 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3597 case OPC_VR54XX_MACCHIU
:
3598 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3600 case OPC_VR54XX_MSACHI
:
3601 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3603 case OPC_VR54XX_MSACHIU
:
3604 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3607 MIPS_INVAL("mul vr54xx");
3608 generate_exception_end(ctx
, EXCP_RI
);
3611 gen_store_gpr(t0
, rd
);
3618 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3627 t0
= tcg_temp_new();
3628 gen_load_gpr(t0
, rs
);
3632 gen_helper_clo(cpu_gpr
[rd
], t0
);
3636 gen_helper_clz(cpu_gpr
[rd
], t0
);
3638 #if defined(TARGET_MIPS64)
3641 gen_helper_dclo(cpu_gpr
[rd
], t0
);
3645 gen_helper_dclz(cpu_gpr
[rd
], t0
);
3652 /* Godson integer instructions */
3653 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3654 int rd
, int rs
, int rt
)
3666 case OPC_MULTU_G_2E
:
3667 case OPC_MULTU_G_2F
:
3668 #if defined(TARGET_MIPS64)
3669 case OPC_DMULT_G_2E
:
3670 case OPC_DMULT_G_2F
:
3671 case OPC_DMULTU_G_2E
:
3672 case OPC_DMULTU_G_2F
:
3674 t0
= tcg_temp_new();
3675 t1
= tcg_temp_new();
3678 t0
= tcg_temp_local_new();
3679 t1
= tcg_temp_local_new();
3683 gen_load_gpr(t0
, rs
);
3684 gen_load_gpr(t1
, rt
);
3689 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3690 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3692 case OPC_MULTU_G_2E
:
3693 case OPC_MULTU_G_2F
:
3694 tcg_gen_ext32u_tl(t0
, t0
);
3695 tcg_gen_ext32u_tl(t1
, t1
);
3696 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3697 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3702 TCGLabel
*l1
= gen_new_label();
3703 TCGLabel
*l2
= gen_new_label();
3704 TCGLabel
*l3
= gen_new_label();
3705 tcg_gen_ext32s_tl(t0
, t0
);
3706 tcg_gen_ext32s_tl(t1
, t1
);
3707 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3708 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3711 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3712 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3713 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3716 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3717 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3724 TCGLabel
*l1
= gen_new_label();
3725 TCGLabel
*l2
= gen_new_label();
3726 tcg_gen_ext32u_tl(t0
, t0
);
3727 tcg_gen_ext32u_tl(t1
, t1
);
3728 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3729 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3732 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3733 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3740 TCGLabel
*l1
= gen_new_label();
3741 TCGLabel
*l2
= gen_new_label();
3742 TCGLabel
*l3
= gen_new_label();
3743 tcg_gen_ext32u_tl(t0
, t0
);
3744 tcg_gen_ext32u_tl(t1
, t1
);
3745 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3746 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3747 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3749 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3752 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3753 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3760 TCGLabel
*l1
= gen_new_label();
3761 TCGLabel
*l2
= gen_new_label();
3762 tcg_gen_ext32u_tl(t0
, t0
);
3763 tcg_gen_ext32u_tl(t1
, t1
);
3764 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3765 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3768 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3769 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3773 #if defined(TARGET_MIPS64)
3774 case OPC_DMULT_G_2E
:
3775 case OPC_DMULT_G_2F
:
3776 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3778 case OPC_DMULTU_G_2E
:
3779 case OPC_DMULTU_G_2F
:
3780 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3785 TCGLabel
*l1
= gen_new_label();
3786 TCGLabel
*l2
= gen_new_label();
3787 TCGLabel
*l3
= gen_new_label();
3788 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3789 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3792 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3793 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3794 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3797 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3801 case OPC_DDIVU_G_2E
:
3802 case OPC_DDIVU_G_2F
:
3804 TCGLabel
*l1
= gen_new_label();
3805 TCGLabel
*l2
= gen_new_label();
3806 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3807 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3810 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3817 TCGLabel
*l1
= gen_new_label();
3818 TCGLabel
*l2
= gen_new_label();
3819 TCGLabel
*l3
= gen_new_label();
3820 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3821 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3822 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3824 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3827 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3831 case OPC_DMODU_G_2E
:
3832 case OPC_DMODU_G_2F
:
3834 TCGLabel
*l1
= gen_new_label();
3835 TCGLabel
*l2
= gen_new_label();
3836 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3837 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3840 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3851 /* Loongson multimedia instructions */
3852 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3854 uint32_t opc
, shift_max
;
3857 opc
= MASK_LMI(ctx
->opcode
);
3863 t0
= tcg_temp_local_new_i64();
3864 t1
= tcg_temp_local_new_i64();
3867 t0
= tcg_temp_new_i64();
3868 t1
= tcg_temp_new_i64();
3872 gen_load_fpr64(ctx
, t0
, rs
);
3873 gen_load_fpr64(ctx
, t1
, rt
);
3875 #define LMI_HELPER(UP, LO) \
3876 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3877 #define LMI_HELPER_1(UP, LO) \
3878 case OPC_##UP: gen_helper_##LO(t0, t0); break
3879 #define LMI_DIRECT(UP, LO, OP) \
3880 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);
    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);
    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);
    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);
    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);
    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);
    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);
    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);
    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);
    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(PANDN, pandn, andc);
    LMI_DIRECT(OR, or, or);
        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);
        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        /* Make sure shift count isn't TCG undefined behaviour.  */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);
        tcg_gen_shl_i64(t0, t0, t1);
        /* Since SRA is UndefinedResult without sign-extended inputs,
           we can treat SRA and DSRA the same.  */
        tcg_gen_sar_i64(t0, t0, t1);
        /* We want to shift in zeros for SRL; zero-extend first.  */
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_shr_i64(t0, t0, t1);
        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);
        /* Shifts larger than MAX produce zero.  */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
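        /* The vector shifts first mask the count so TCG never sees an
           out-of-range shift amount; the setcond/neg/and sequence then turns
           "count < shift_max" into an all-ones or all-zeroes mask that is
           ANDed into the result, so over-large counts yield zero. */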
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGLabel *lab = gen_new_label();
        tcg_gen_mov_i64(t2, t0);
        tcg_gen_add_i64(t0, t1, t2);
        if (opc == OPC_ADD_CP2) {
            tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_xor_i64(t1, t1, t2);
        tcg_gen_xor_i64(t2, t2, t0);
        tcg_gen_andc_i64(t1, t2, t1);
        tcg_temp_free_i64(t2);
        tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
        generate_exception(ctx, EXCP_OVERFLOW);
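        /* Overflow check for the ADD_CP2-style cases above: with t2 holding
           one addend and t1 the other, (t2 ^ result) & ~(t1 ^ t2) has its
           sign bit set exactly when both addends had the same sign but the
           result's sign differs, i.e. on signed overflow; the branch skips
           the EXCP_OVERFLOW exception otherwise. */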
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGLabel *lab = gen_new_label();
        tcg_gen_mov_i64(t2, t0);
        tcg_gen_sub_i64(t0, t1, t2);
        if (opc == OPC_SUB_CP2) {
            tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_xor_i64(t1, t1, t2);
        tcg_gen_xor_i64(t2, t2, t0);
        tcg_gen_and_i64(t1, t1, t2);
        tcg_temp_free_i64(t2);
        tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
        generate_exception(ctx, EXCP_OVERFLOW);
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);
        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field?  */
        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr64(ctx, t0, rd);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */
            generate_exception_end(ctx, EXCP_TRAP);
        case OPC_TLT:   /* rs < rs  */
        case OPC_TLTI:  /* r0 < 0   */
        case OPC_TLTU:  /* rs < rs  unsigned */
        case OPC_TLTIU: /* r0 < 0   unsigned */
        case OPC_TNE:   /* rs != rs */
        case OPC_TNEI:  /* r0 != 0  */
            /* Never trap: treat as NOP. */
        TCGLabel *l1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
        generate_exception(ctx, EXCP_TRAP);
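/* gen_trap layout: trap opcodes whose condition is statically true (e.g. TEQ
   with rs == rt) raise EXCP_TRAP unconditionally, statically false ones fall
   through as NOPs, and the remaining cases branch around the
   generate_exception(EXCP_TRAP) call using the inverse TCG condition. */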
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
    if (unlikely(ctx->singlestep_enabled)) {
#ifndef CONFIG_USER_ONLY
    return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);

static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
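/* Direct translation-block chaining: when the branch target lies on the same
   guest page (and single-stepping is off), exit_tb with the TB pointer plus
   the slot index lets the generated block be patched to jump straight to its
   successor; otherwise the PC is stored and control returns to the main loop,
   raising a debug exception first if single-stepping is enabled. */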
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int insn_bytes, int rs, int rt,
                                int32_t offset, int delayslot_size)
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->pc);
        generate_exception_end(ctx, EXCP_RI);

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        btgt = ctx->pc + insn_bytes + offset;
        /* Compare to zero */
            gen_load_gpr(t0, rs);
        btgt = ctx->pc + insn_bytes + offset;
#if defined(TARGET_MIPS64)
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->pc + insn_bytes + offset;
        /* Jump to immediate */
        btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);
    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            /* Always take and link */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:     /* rx != rx */
        case OPC_BGTZ:    /* 0 > 0    */
        case OPC_BLTZ:    /* 0 < 0    */
        case OPC_BLTZAL:  /* 0 < 0    */
            /* Handle as an unconditional branch to get correct delay
               slot checking.  */
            btgt = ctx->pc + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */
        case OPC_BNEL:  /* rx != rx likely */
        case OPC_BGTZL: /* 0 > 0 likely    */
        case OPC_BLTZL: /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BX;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
#if defined(TARGET_MIPS64)
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BC;
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BL;
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btgt;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;
        int post_delay = insn_bytes + delayslot_size;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);
        tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);
    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
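/* gen_compute_branch ends by recording the branch kind in ctx->hflags
   (roughly: MIPS_HFLAG_B for unconditionally taken branches, _BC and _BL for
   normal and "likely" conditional branches, _BR for register jumps) together
   with the delay-slot size, so the delay-slot instruction can be translated
   before the branch itself is resolved. */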
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
        if (lsb + msb > 31) {
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1U << (msb + 1)) - 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        if (lsb + msb > 63) {
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_gpr(t0, rt);
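/* The extract-style operations pull (msb + 1) bits starting at lsb out of rs
   with a shift-and-mask, the insert-style ones merge the field into rt with
   tcg_gen_deposit_tl; the 32-bit forms additionally sign-extend the result
   before it is written back with gen_store_gpr. */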
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
        /* If no destination, treat it as a NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        TCGv t1 = tcg_temp_new();
        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        TCGv t1 = tcg_temp_new();
        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        TCGv t1 = tcg_temp_new();
        tcg_gen_shri_tl(t1, t0, 16);
        tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
        tcg_gen_shli_tl(t0, t0, 16);
        tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_shri_tl(t1, t0, 32);
        tcg_gen_shli_tl(t0, t0, 32);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        MIPS_INVAL("bsfhl");
        generate_exception_end(ctx, EXCP_RI);
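/* The byte-shuffle forms handled here swap the bytes within each halfword
   (WSBH-style) using the shift/mask/or pattern above, the sign-extension
   forms (SEB/SEH-style) use ext8s/ext16s, and the TARGET_MIPS64 variants
   apply the same byte and halfword swaps across the full 64-bit register. */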
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
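/* LSA computes rd = (rs << (imm2 + 1)) + rt; the OPC_LSA form then
   sign-extends the 32-bit result, while the 64-bit variant keeps the
   full-width sum. */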
static void gen_align(DisasContext *ctx, int opc, int rd, int rs, int rt,
                      int bp)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        tcg_gen_mov_tl(cpu_gpr[rd], t0);
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t1, rs);
            TCGv_i64 t2 = tcg_temp_new_i64();
            tcg_gen_concat_tl_i64(t2, t1, t0);
            tcg_gen_shri_i64(t2, t2, 8 * (4 - bp));
            gen_move_low32(cpu_gpr[rd], t2);
            tcg_temp_free_i64(t2);
#if defined(TARGET_MIPS64)
            tcg_gen_shli_tl(t0, t0, 8 * bp);
            tcg_gen_shri_tl(t1, t1, 8 * (8 - bp));
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
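/* ALIGN concatenates rs:rt into a 64-bit value and shifts right by
   8 * (4 - bp) to select the unaligned word, while the TARGET_MIPS64 DALIGN
   path builds the result directly from shifts of rt and rs; a byte position
   of 0 degenerates to a plain (sign-extended) move of rt. */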
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        gen_helper_bitswap(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dbitswap(cpu_gpr[rd], t0);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
    tcg_gen_shri_i64(t0, t0, 32);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
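/* The helpers above provide the 32-bit views of CP0 state kept in the env:
   gen_mfhc0_load64 and gen_mthc0_store64 read or replace the upper word of a
   64-bit field, the EntryLo variants use a split point of bit 30 rather than
   32 on MIPS64 (matching how the RI/XI fields sit at the top of the 64-bit
   register), and gen_mfc0_load32 / gen_mtc0_store32 handle plain 32-bit
   fields. */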
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                             ctx->CP0_LLAddr_shift);
            CP0_CHECK(ctx->mrp);
            gen_helper_mfhc0_maar(arg, cpu_env);
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    tcg_gen_movi_tl(arg, 0);
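/* MFHC0 from a register that has no upper half is not an error: the
   cp0_unimplemented path just logs the access and returns 0. */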
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            /* LLAddr is read-only (the only exception is bit 0 if LLB is
               supported); the CP0_LLAddr_rw_bitmask does not seem to be
               relevant for modern MIPS cores supporting MTHC0, therefore
               treating MTHC0 to LLAddr as NOP. */
            CP0_CHECK(ctx->mrp);
            gen_helper_mthc0_maar(cpu_env, arg);
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn, reg, sel);
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
        tcg_gen_movi_tl(arg, ~0);
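/* gen_mfc0_unimplemented returns 0 when the CPU implements Release 6 and ~0
   otherwise, reflecting the different treatment of reads from reserved CP0
   registers across ISA revisions. */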
static void gen_mfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

        check_insn(ctx, ISA_MIPS32);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Index));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpcontrol(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpconf0(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpconf1(arg, cpu_env);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPControl));
            goto cp0_unimplemented;
            CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_helper_mfc0_random(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEControl));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf0));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf1));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load64(arg, offsetof(CPUMIPSState, CP0_YQMask));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load64(arg, offsetof(CPUMIPSState, CP0_VPESchedule));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load64(arg, offsetof(CPUMIPSState, CP0_VPEScheFBack));
            rn = "VPEScheFBack";
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEOpt));
            goto cp0_unimplemented;
            TCGv_i64 tmp = tcg_temp_new_i64();
            tcg_gen_ld_i64(tmp, cpu_env,
                           offsetof(CPUMIPSState, CP0_EntryLo0));
#if defined(TARGET_MIPS64)
            /* Move RI/XI fields to bits 31:30 */
            tcg_gen_shri_tl(arg, tmp, CP0EnLo_XI);
            tcg_gen_deposit_tl(tmp, tmp, arg, 30, 2);
            gen_move_low32(arg, tmp);
            tcg_temp_free_i64(tmp);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcstatus(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcbind(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcrestart(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tchalt(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tccontext(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcschedule(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcschefback(arg, cpu_env);
            goto cp0_unimplemented;
            TCGv_i64 tmp = tcg_temp_new_i64();
            tcg_gen_ld_i64(tmp, cpu_env,
                           offsetof(CPUMIPSState, CP0_EntryLo1));
#if defined(TARGET_MIPS64)
            /* Move RI/XI fields to bits 31:30 */
            tcg_gen_shri_tl(arg, tmp, CP0EnLo_XI);
            tcg_gen_deposit_tl(tmp, tmp, arg, 30, 2);
            gen_move_low32(arg, tmp);
            tcg_temp_free_i64(tmp);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_GlobalNumber));
            rn = "GlobalNumber";
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_Context));
            tcg_gen_ext32s_tl(arg, arg);
//          gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
            rn = "ContextConfig";
            goto cp0_unimplemented;
            CP0_CHECK(ctx->ulri);
            tcg_gen_ld32s_tl(arg, cpu_env,
                             offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageMask));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageGrain));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Wired));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf0));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf1));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf2));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf3));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf4));
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_HWREna));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
            tcg_gen_ext32s_tl(arg, arg);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstr));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstrP));
            goto cp0_unimplemented;
            /* Mark as an IO operation because we read the time.  */
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mfc0_count(arg, cpu_env);
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            /* Break the TB to be able to take timer interrupts immediately
               after reading count.  */
            ctx->bstate = BS_STOP;
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryHi));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Compare));
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Status));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_IntCtl));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSCtl));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Cause));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PRid));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_EBase));
            check_insn(ctx, ISA_MIPS32R2);
            CP0_CHECK(ctx->cmgcr);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_CMGCRBase));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config0));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config1));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config2));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config3));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config4));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config5));
        /* 6,7 are implementation dependent */
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config6));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config7));
            goto cp0_unimplemented;
            gen_helper_mfc0_lladdr(arg, cpu_env);
            CP0_CHECK(ctx->mrp);
            gen_helper_mfc0_maar(arg, cpu_env);
            CP0_CHECK(ctx->mrp);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_MAARI));
            goto cp0_unimplemented;
            gen_helper_1e0i(mfc0_watchlo, arg, sel);
            goto cp0_unimplemented;
            gen_helper_1e0i(mfc0_watchhi, arg, sel);
            goto cp0_unimplemented;
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS3);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_XContext));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Framemask));
            goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        rn = "'Diagnostic"; /* implementation dependent */
            gen_helper_mfc0_debug(arg, cpu_env); /* EJTAG support */
//          gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
            rn = "TraceControl";
//          gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
            rn = "TraceControl2";
//          gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
            rn = "UserTraceData";
//          gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Performance0));
            rn = "Performance0";
//          gen_helper_mfc0_performance1(arg);
            rn = "Performance1";
//          gen_helper_mfc0_performance2(arg);
            rn = "Performance2";
//          gen_helper_mfc0_performance3(arg);
            rn = "Performance3";
//          gen_helper_mfc0_performance4(arg);
            rn = "Performance4";
//          gen_helper_mfc0_performance5(arg);
            rn = "Performance5";
//          gen_helper_mfc0_performance6(arg);
            rn = "Performance6";
//          gen_helper_mfc0_performance7(arg);
            rn = "Performance7";
            goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_ErrCtl));
            goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
            goto cp0_unimplemented;
            TCGv_i64 tmp = tcg_temp_new_i64();
            tcg_gen_ld_i64(tmp, cpu_env, offsetof(CPUMIPSState, CP0_TagLo));
            gen_move_low32(arg, tmp);
            tcg_temp_free_i64(tmp);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataLo));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagHi));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataHi));
            goto cp0_unimplemented;
        tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
        tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
            CP0_CHECK(ctx->kscrexist & (1 << sel));
            tcg_gen_ld_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
static void gen_mtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

    check_insn(ctx, ISA_MIPS32);
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mtc0_index(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_mvpcontrol(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            goto cp0_unimplemented;
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpecontrol(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeconf0(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeconf1(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_yqmask(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_VPESchedule));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_VPEScheFBack));
            rn = "VPEScheFBack";
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeopt(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_entrylo0(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcstatus(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcbind(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcrestart(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tchalt(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tccontext(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcschedule(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcschefback(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_entrylo1(cpu_env, arg);
            rn = "GlobalNumber";
            goto cp0_unimplemented;
            gen_helper_mtc0_context(cpu_env, arg);
//          gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
            rn = "ContextConfig";
            goto cp0_unimplemented;
            CP0_CHECK(ctx->ulri);
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            goto cp0_unimplemented;
            gen_helper_mtc0_pagemask(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_pagegrain(cpu_env, arg);
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            gen_helper_mtc0_wired(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf0(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf1(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf2(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf3(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf4(cpu_env, arg);
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_hwrena(cpu_env, arg);
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            gen_helper_mtc0_count(cpu_env, arg);
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            gen_helper_mtc0_entryhi(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_compare(cpu_env, arg);
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            save_cpu_state(ctx, 1);
            gen_helper_mtc0_status(cpu_env, arg);
            /* BS_STOP isn't good enough here, hflags may have changed. */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_intctl(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsctl(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            save_cpu_state(ctx, 1);
            gen_helper_mtc0_cause(cpu_env, arg);
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_ebase(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_config0(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            /* ignored, read only */
            gen_helper_mtc0_config2(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            gen_helper_mtc0_config3(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            gen_helper_mtc0_config4(cpu_env, arg);
            ctx->bstate = BS_STOP;
            gen_helper_mtc0_config5(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
        /* 6,7 are implementation dependent */
            rn = "Invalid config selector";
            goto cp0_unimplemented;
            gen_helper_mtc0_lladdr(cpu_env, arg);
            CP0_CHECK(ctx->mrp);
            gen_helper_mtc0_maar(cpu_env, arg);
            CP0_CHECK(ctx->mrp);
            gen_helper_mtc0_maari(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_0e1i(mtc0_watchlo, arg, sel);
            goto cp0_unimplemented;
            gen_helper_0e1i(mtc0_watchhi, arg, sel);
            goto cp0_unimplemented;
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS3);
            gen_helper_mtc0_xcontext(cpu_env, arg);
            goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_helper_mtc0_framemask(cpu_env, arg);
            goto cp0_unimplemented;
        rn = "Diagnostic"; /* implementation dependent */
            gen_helper_mtc0_debug(cpu_env, arg); /* EJTAG support */
            /* BS_STOP isn't good enough here, hflags may have changed. */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
//          gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
            rn = "TraceControl";
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
//          gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
            rn = "TraceControl2";
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
//          gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
            rn = "UserTraceData";
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
//          gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
            goto cp0_unimplemented;
            gen_helper_mtc0_performance0(cpu_env, arg);
            rn = "Performance0";
//          gen_helper_mtc0_performance1(arg);
            rn = "Performance1";
//          gen_helper_mtc0_performance2(arg);
            rn = "Performance2";
//          gen_helper_mtc0_performance3(arg);
            rn = "Performance3";
//          gen_helper_mtc0_performance4(arg);
            rn = "Performance4";
//          gen_helper_mtc0_performance5(arg);
            rn = "Performance5";
//          gen_helper_mtc0_performance6(arg);
            rn = "Performance6";
//          gen_helper_mtc0_performance7(arg);
            rn = "Performance7";
            goto cp0_unimplemented;
            gen_helper_mtc0_errctl(cpu_env, arg);
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            gen_helper_mtc0_taglo(cpu_env, arg);
            gen_helper_mtc0_datalo(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_taghi(cpu_env, arg);
            gen_helper_mtc0_datahi(cpu_env, arg);
            goto cp0_unimplemented;
        tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
            goto cp0_unimplemented;
            gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
            CP0_CHECK(ctx->kscrexist & (1 << sel));
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
            goto cp0_unimplemented;
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
        goto cp0_unimplemented;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    /* For simplicity assume that all writes can cause interrupts.  */
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
        ctx->bstate = BS_STOP;
    LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
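/* Most MTC0 cases end the current translation block: writes that can change
   hflags (Status, Debug) save the PC and use BS_EXCP, writes that may affect
   how code is translated (Config, HWREna, PageGrain, the trace registers)
   use BS_STOP, and when icount is active every write conservatively stops
   the TB so a newly raised interrupt can be taken immediately. */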
#if defined(TARGET_MIPS64)
static void gen_dmfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

        check_insn(ctx, ISA_MIPS64);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Index));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpcontrol(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpconf0(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpconf1(arg, cpu_env);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPControl));
            goto cp0_unimplemented;
            CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_helper_mfc0_random(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEControl));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf0));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf1));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_YQMask));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPESchedule));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPEScheFBack));
            rn = "VPEScheFBack";
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEOpt));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryLo0));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcstatus(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcbind(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tcrestart(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tchalt(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tccontext(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tcschedule(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tcschefback(arg, cpu_env);
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryLo1));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_GlobalNumber));
            rn = "GlobalNumber";
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_Context));
//          gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
            rn = "ContextConfig";
            goto cp0_unimplemented;
            CP0_CHECK(ctx->ulri);
            tcg_gen_ld_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageMask));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageGrain));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Wired));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf0));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf1));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf2));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf3));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf4));
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_HWREna));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstr));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstrP));
            goto cp0_unimplemented;
            /* Mark as an IO operation because we read the time.  */
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mfc0_count(arg, cpu_env);
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            /* Break the TB to be able to take timer interrupts immediately
               after reading count.  */
            ctx->bstate = BS_STOP;
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryHi));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Compare));
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Status));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_IntCtl));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSCtl));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Cause));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PRid));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_EBase));
            check_insn(ctx, ISA_MIPS32R2);
            CP0_CHECK(ctx->cmgcr);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_CMGCRBase));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config0));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config1));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config2));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config3));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config4));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config5));
        /* 6,7 are implementation dependent */
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config6));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config7));
            goto cp0_unimplemented;
            gen_helper_dmfc0_lladdr(arg, cpu_env);
            CP0_CHECK(ctx->mrp);
            gen_helper_dmfc0_maar(arg, cpu_env);
            CP0_CHECK(ctx->mrp);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_MAARI));
            goto cp0_unimplemented;
            gen_helper_1e0i(dmfc0_watchlo, arg, sel);
            goto cp0_unimplemented;
            gen_helper_1e0i(mfc0_watchhi, arg, sel);
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS3);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_XContext));
            goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Framemask));
            goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        rn = "'Diagnostic"; /* implementation dependent */
            gen_helper_mfc0_debug(arg, cpu_env); /* EJTAG support */
//          gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
            rn = "TraceControl";
//          gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
            rn = "TraceControl2";
//          gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
            rn = "UserTraceData";
//          gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Performance0));
            rn = "Performance0";
//          gen_helper_dmfc0_performance1(arg);
            rn = "Performance1";
//          gen_helper_dmfc0_performance2(arg);
            rn = "Performance2";
//          gen_helper_dmfc0_performance3(arg);
            rn = "Performance3";
//          gen_helper_dmfc0_performance4(arg);
            rn = "Performance4";
//          gen_helper_dmfc0_performance5(arg);
            rn = "Performance5";
//          gen_helper_dmfc0_performance6(arg);
            rn = "Performance6";
//          gen_helper_dmfc0_performance7(arg);
            rn = "Performance7";
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_ErrCtl));
            goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagLo));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataLo));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagHi));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataHi));
            goto cp0_unimplemented;
        tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
            CP0_CHECK(ctx->kscrexist & (1 << sel));
            tcg_gen_ld_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
static void gen_dmtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

    check_insn(ctx, ISA_MIPS64);
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mtc0_index(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_mvpcontrol(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            goto cp0_unimplemented;
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpecontrol(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeconf0(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeconf1(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_yqmask(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPESchedule));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPEScheFBack));
            rn = "VPEScheFBack";
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeopt(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_dmtc0_entrylo0(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcstatus(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcbind(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcrestart(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tchalt(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tccontext(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcschedule(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcschefback(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_dmtc0_entrylo1(cpu_env, arg);
            rn = "GlobalNumber";
            goto cp0_unimplemented;
            gen_helper_mtc0_context(cpu_env, arg);
//          gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
            rn = "ContextConfig";
            goto cp0_unimplemented;
            CP0_CHECK(ctx->ulri);
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            goto cp0_unimplemented;
            gen_helper_mtc0_pagemask(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_pagegrain(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_wired(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf0(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf1(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf2(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf3(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf4(cpu_env, arg);
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_hwrena(cpu_env, arg);
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            gen_helper_mtc0_count(cpu_env, arg);
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
            gen_helper_mtc0_entryhi(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_compare(cpu_env, arg);
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
            save_cpu_state(ctx, 1);
            gen_helper_mtc0_status(cpu_env, arg);
            /* BS_STOP isn't good enough here, hflags may have changed. */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_intctl(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsctl(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            save_cpu_state(ctx, 1);
            /* Mark as an IO operation because we may trigger a software
               interrupt.  */
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mtc0_cause(cpu_env, arg);
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            /* Stop translation as we may have triggered an interrupt */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_ebase(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_config0(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            /* ignored, read only */
            gen_helper_mtc0_config2(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            gen_helper_mtc0_config3(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            /* currently ignored */
            gen_helper_mtc0_config5(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
        /* 6,7 are implementation dependent */
            rn = "Invalid config selector";
            goto cp0_unimplemented;
            gen_helper_mtc0_lladdr(cpu_env, arg);
            CP0_CHECK(ctx->mrp);
            gen_helper_mtc0_maar(cpu_env, arg);
            CP0_CHECK(ctx->mrp);
            gen_helper_mtc0_maari(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_0e1i(mtc0_watchlo, arg, sel);
            goto cp0_unimplemented;
            gen_helper_0e1i(mtc0_watchhi, arg, sel);
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS3);
            gen_helper_mtc0_xcontext(cpu_env, arg);
            goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_helper_mtc0_framemask(cpu_env, arg);
            goto cp0_unimplemented;
        rn = "Diagnostic"; /* implementation dependent */
            gen_helper_mtc0_debug(cpu_env, arg); /* EJTAG support */
            /* BS_STOP isn't good enough here, hflags may have changed. */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
//          gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            rn = "TraceControl";
//          gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            rn = "TraceControl2";
//          gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            rn = "UserTraceData";
//          gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
            goto cp0_unimplemented;
            gen_helper_mtc0_performance0(cpu_env, arg);
            rn = "Performance0";
//          gen_helper_mtc0_performance1(cpu_env, arg);
            rn = "Performance1";
//          gen_helper_mtc0_performance2(cpu_env, arg);
            rn = "Performance2";
//          gen_helper_mtc0_performance3(cpu_env, arg);
            rn = "Performance3";
//          gen_helper_mtc0_performance4(cpu_env, arg);
            rn = "Performance4";
//          gen_helper_mtc0_performance5(cpu_env, arg);
            rn = "Performance5";
//          gen_helper_mtc0_performance6(cpu_env, arg);
            rn = "Performance6";
//          gen_helper_mtc0_performance7(cpu_env, arg);
            rn = "Performance7";
            goto cp0_unimplemented;
            gen_helper_mtc0_errctl(cpu_env, arg);
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            gen_helper_mtc0_taglo(cpu_env, arg);
            gen_helper_mtc0_datalo(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_taghi(cpu_env, arg);
            gen_helper_mtc0_datahi(cpu_env, arg);
            goto cp0_unimplemented;
        tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
            goto cp0_unimplemented;
            gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
            CP0_CHECK(ctx->kscrexist & (1 << sel));
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
            goto cp0_unimplemented;
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
        goto cp0_unimplemented;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    /* For simplicity assume that all writes can cause interrupts.  */
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
        ctx->bstate = BS_STOP;
    LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
#endif /* TARGET_MIPS64 */
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
            gen_helper_mftc0_vpecontrol(t0, cpu_env);
            gen_helper_mftc0_vpeconf0(t0, cpu_env);
            gen_helper_mftc0_tcstatus(t0, cpu_env);
            gen_helper_mftc0_tcbind(t0, cpu_env);
            gen_helper_mftc0_tcrestart(t0, cpu_env);
            gen_helper_mftc0_tchalt(t0, cpu_env);
            gen_helper_mftc0_tccontext(t0, cpu_env);
            gen_helper_mftc0_tcschedule(t0, cpu_env);
            gen_helper_mftc0_tcschefback(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_entryhi(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_status(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_cause(t0, cpu_env);
            gen_helper_mftc0_epc(t0, cpu_env);
            gen_helper_mftc0_ebase(t0, cpu_env);
            gen_helper_mftc0_configx(t0, cpu_env, tcg_const_tl(sel));
            gen_helper_mftc0_debug(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
        gen_mfc0(ctx, t0, rt, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_1e0i(mftgpr, t0, rt);
    /* Auxiliary CPU registers */
            gen_helper_1e0i(mftlo, t0, 0);
            gen_helper_1e0i(mfthi, t0, 0);
            gen_helper_1e0i(mftacx, t0, 0);
            gen_helper_1e0i(mftlo, t0, 1);
            gen_helper_1e0i(mfthi, t0, 1);
            gen_helper_1e0i(mftacx, t0, 1);
            gen_helper_1e0i(mftlo, t0, 2);
            gen_helper_1e0i(mfthi, t0, 2);
            gen_helper_1e0i(mftacx, t0, 2);
            gen_helper_1e0i(mftlo, t0, 3);
            gen_helper_1e0i(mfthi, t0, 3);
            gen_helper_1e0i(mftacx, t0, 3);
            gen_helper_mftdsp(t0, cpu_env);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32h(ctx, fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
        gen_helper_1e0i(cfc1, t0, rt);
    /* COP2: Not implemented. */
    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    gen_store_gpr(t0, rd);
    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
            gen_helper_mttc0_vpecontrol(cpu_env, t0);
            gen_helper_mttc0_vpeconf0(cpu_env, t0);
            gen_helper_mttc0_tcstatus(cpu_env, t0);
            gen_helper_mttc0_tcbind(cpu_env, t0);
            gen_helper_mttc0_tcrestart(cpu_env, t0);
            gen_helper_mttc0_tchalt(cpu_env, t0);
            gen_helper_mttc0_tccontext(cpu_env, t0);
            gen_helper_mttc0_tcschedule(cpu_env, t0);
            gen_helper_mttc0_tcschefback(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_entryhi(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_status(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_cause(cpu_env, t0);
            gen_helper_mttc0_ebase(cpu_env, t0);
            gen_helper_mttc0_debug(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_mtc0(ctx, t0, rd, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_0e1i(mttgpr, t0, rd);
    /* Auxiliary CPU registers */
            gen_helper_0e1i(mttlo, t0, 0);
            gen_helper_0e1i(mtthi, t0, 0);
            gen_helper_0e1i(mttacx, t0, 0);
            gen_helper_0e1i(mttlo, t0, 1);
            gen_helper_0e1i(mtthi, t0, 1);
            gen_helper_0e1i(mttacx, t0, 1);
            gen_helper_0e1i(mttlo, t0, 2);
            gen_helper_0e1i(mtthi, t0, 2);
            gen_helper_0e1i(mttacx, t0, 2);
            gen_helper_0e1i(mttlo, t0, 3);
            gen_helper_0e1i(mtthi, t0, 3);
            gen_helper_0e1i(mttacx, t0, 3);
            gen_helper_mttdsp(cpu_env, t0);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fs_tmp = tcg_const_i32(rd);

            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
    /* COP2: Not implemented. */
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
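/*
 * gen_cp0() below dispatches the privileged COP0 opcode space: MFC0/MTC0
 * and their 64-bit and high-half variants, the MT ASE MFTR/MTTR forms
 * handled above, the TLB maintenance instructions and ERET/DERET/WAIT.
 * Each TLB operation first checks that the CPU model provides the
 * corresponding helper_tlb* hook; the TLBINV/TLBINVF forms are treated as
 * nops when the hook is absent, per the comments in the code.
 */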
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
    const char *opn = "ldst";

    check_cp0_enabled(ctx);
        gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
            TCGv t0 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        check_insn(ctx, ISA_MIPS3);
            TCGv t0 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
        gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
            TCGv t0 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
        check_insn(ctx, ASE_MT);
        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        check_insn(ctx, ASE_MT);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        if (!env->tlb->helper_tlbwi)
        gen_helper_tlbwi(cpu_env);
        if (!env->tlb->helper_tlbinv) {
        gen_helper_tlbinv(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbinvf) {
        gen_helper_tlbinvf(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbwr)
        gen_helper_tlbwr(cpu_env);
        if (!env->tlb->helper_tlbp)
        gen_helper_tlbp(cpu_env);
        if (!env->tlb->helper_tlbr)
        gen_helper_tlbr(cpu_env);
    case OPC_ERET: /* OPC_ERETNC */
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
            if (ctx->opcode & (1 << bit_shift)) {
                check_insn(ctx, ISA_MIPS32R5);
                gen_helper_eretnc(cpu_env);
                check_insn(ctx, ISA_MIPS2);
                gen_helper_eret(cpu_env);
            ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception_end(ctx, EXCP_RI);
            gen_helper_deret(cpu_env);
            ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        /* If we get an exception, we want to restart at next instruction */
        save_cpu_state(ctx, 1);
        gen_helper_wait(cpu_env);
        ctx->bstate = BS_EXCP;
        generate_exception_end(ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
#endif /* !CONFIG_USER_ONLY */
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();

    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);

    btarget = ctx->pc + 4 + offset;

        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BL;
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i32(t0);
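/*
 * Pre-R6 CP1 branches (BC1F/BC1T and their likely and paired-single
 * variants) test condition-code bits held in FCR31.  get_fp_bit(cc) maps a
 * condition-code number to its bit position in FCR31; the generated TCG
 * code shifts that bit down, optionally inverts it, masks it to a single
 * bit and copies the result into 'bcond' for the delay-slot handling.  For
 * instance, the "branch if true" case above boils down to the following
 * sequence (a sketch of the emitted ops, copied from the code above):
 *
 *     tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
 *     tcg_gen_andi_i32(t0, t0, 1);
 *     tcg_gen_extu_i32_tl(bcond, t0);
 */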
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
        generate_exception_end(ctx, EXCP_RI);

    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);

    btarget = addr_add(ctx, ctx->pc + 4, offset);

        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);

    tcg_gen_trunc_i64_tl(bcond, t0);

    ctx->btarget = btarget;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;

    tcg_temp_free_i64(t0);
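/*
 * R6 BC1EQZ/BC1NEZ drop the FCR31 condition codes entirely: the branch
 * condition is bit 0 of FPR[ft].  gen_compute_branch1_r6() loads the FPR,
 * masks bit 0, and for the "equal to zero" form XORs it with 1 before
 * copying the value into 'bcond'.
 */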
/* Coprocessor 1 (FPU) */
#define FOP(func, fmt) (((fmt) << 21) | (func))
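/*
 * FOP() packs the COP1 function field (bits 5..0) with the fmt field
 * (bits 25..21 of the value used here).  Assuming the usual MIPS fmt
 * encodings (FMT_S == 16, FMT_D == 17, defined earlier in this file),
 * OPC_ADD_S below is FOP(0, FMT_S) == (16 << 21) | 0 == 0x02000000 and
 * OPC_ADD_D is FOP(0, FMT_D) == (17 << 21) | 0 == 0x02200000.
 */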
8360 OPC_ADD_S
= FOP(0, FMT_S
),
8361 OPC_SUB_S
= FOP(1, FMT_S
),
8362 OPC_MUL_S
= FOP(2, FMT_S
),
8363 OPC_DIV_S
= FOP(3, FMT_S
),
8364 OPC_SQRT_S
= FOP(4, FMT_S
),
8365 OPC_ABS_S
= FOP(5, FMT_S
),
8366 OPC_MOV_S
= FOP(6, FMT_S
),
8367 OPC_NEG_S
= FOP(7, FMT_S
),
8368 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8369 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8370 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8371 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8372 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8373 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8374 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8375 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8376 OPC_SEL_S
= FOP(16, FMT_S
),
8377 OPC_MOVCF_S
= FOP(17, FMT_S
),
8378 OPC_MOVZ_S
= FOP(18, FMT_S
),
8379 OPC_MOVN_S
= FOP(19, FMT_S
),
8380 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8381 OPC_RECIP_S
= FOP(21, FMT_S
),
8382 OPC_RSQRT_S
= FOP(22, FMT_S
),
8383 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8384 OPC_MADDF_S
= FOP(24, FMT_S
),
8385 OPC_MSUBF_S
= FOP(25, FMT_S
),
8386 OPC_RINT_S
= FOP(26, FMT_S
),
8387 OPC_CLASS_S
= FOP(27, FMT_S
),
8388 OPC_MIN_S
= FOP(28, FMT_S
),
8389 OPC_RECIP2_S
= FOP(28, FMT_S
),
8390 OPC_MINA_S
= FOP(29, FMT_S
),
8391 OPC_RECIP1_S
= FOP(29, FMT_S
),
8392 OPC_MAX_S
= FOP(30, FMT_S
),
8393 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8394 OPC_MAXA_S
= FOP(31, FMT_S
),
8395 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8396 OPC_CVT_D_S
= FOP(33, FMT_S
),
8397 OPC_CVT_W_S
= FOP(36, FMT_S
),
8398 OPC_CVT_L_S
= FOP(37, FMT_S
),
8399 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8400 OPC_CMP_F_S
= FOP (48, FMT_S
),
8401 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8402 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8403 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8404 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8405 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8406 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8407 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8408 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8409 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8410 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8411 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8412 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8413 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8414 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8415 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8417 OPC_ADD_D
= FOP(0, FMT_D
),
8418 OPC_SUB_D
= FOP(1, FMT_D
),
8419 OPC_MUL_D
= FOP(2, FMT_D
),
8420 OPC_DIV_D
= FOP(3, FMT_D
),
8421 OPC_SQRT_D
= FOP(4, FMT_D
),
8422 OPC_ABS_D
= FOP(5, FMT_D
),
8423 OPC_MOV_D
= FOP(6, FMT_D
),
8424 OPC_NEG_D
= FOP(7, FMT_D
),
8425 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8426 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8427 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8428 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8429 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8430 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8431 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8432 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8433 OPC_SEL_D
= FOP(16, FMT_D
),
8434 OPC_MOVCF_D
= FOP(17, FMT_D
),
8435 OPC_MOVZ_D
= FOP(18, FMT_D
),
8436 OPC_MOVN_D
= FOP(19, FMT_D
),
8437 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8438 OPC_RECIP_D
= FOP(21, FMT_D
),
8439 OPC_RSQRT_D
= FOP(22, FMT_D
),
8440 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8441 OPC_MADDF_D
= FOP(24, FMT_D
),
8442 OPC_MSUBF_D
= FOP(25, FMT_D
),
8443 OPC_RINT_D
= FOP(26, FMT_D
),
8444 OPC_CLASS_D
= FOP(27, FMT_D
),
8445 OPC_MIN_D
= FOP(28, FMT_D
),
8446 OPC_RECIP2_D
= FOP(28, FMT_D
),
8447 OPC_MINA_D
= FOP(29, FMT_D
),
8448 OPC_RECIP1_D
= FOP(29, FMT_D
),
8449 OPC_MAX_D
= FOP(30, FMT_D
),
8450 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8451 OPC_MAXA_D
= FOP(31, FMT_D
),
8452 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8453 OPC_CVT_S_D
= FOP(32, FMT_D
),
8454 OPC_CVT_W_D
= FOP(36, FMT_D
),
8455 OPC_CVT_L_D
= FOP(37, FMT_D
),
8456 OPC_CMP_F_D
= FOP (48, FMT_D
),
8457 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8458 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8459 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8460 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8461 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8462 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8463 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8464 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8465 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8466 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8467 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8468 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8469 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8470 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8471 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8473 OPC_CVT_S_W
= FOP(32, FMT_W
),
8474 OPC_CVT_D_W
= FOP(33, FMT_W
),
8475 OPC_CVT_S_L
= FOP(32, FMT_L
),
8476 OPC_CVT_D_L
= FOP(33, FMT_L
),
8477 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8479 OPC_ADD_PS
= FOP(0, FMT_PS
),
8480 OPC_SUB_PS
= FOP(1, FMT_PS
),
8481 OPC_MUL_PS
= FOP(2, FMT_PS
),
8482 OPC_DIV_PS
= FOP(3, FMT_PS
),
8483 OPC_ABS_PS
= FOP(5, FMT_PS
),
8484 OPC_MOV_PS
= FOP(6, FMT_PS
),
8485 OPC_NEG_PS
= FOP(7, FMT_PS
),
8486 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8487 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8488 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8489 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8490 OPC_MULR_PS
= FOP(26, FMT_PS
),
8491 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8492 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8493 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8494 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8496 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8497 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8498 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8499 OPC_PLL_PS
= FOP(44, FMT_PS
),
8500 OPC_PLU_PS
= FOP(45, FMT_PS
),
8501 OPC_PUL_PS
= FOP(46, FMT_PS
),
8502 OPC_PUU_PS
= FOP(47, FMT_PS
),
8503 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8504 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8505 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8506 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8507 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8508 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8509 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8510 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8511 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8512 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8513 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8514 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8515 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8516 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8517 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8518 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8522 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8523 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8524 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8525 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8526 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8527 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8528 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8529 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8530 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8531 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8532 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8533 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8534 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8535 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8536 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8537 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8538 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8539 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8540 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8541 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8542 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8543 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8545 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8546 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8547 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8548 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8549 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8550 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8551 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8552 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8553 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8554 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8555 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8556 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8557 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8558 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8559 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8560 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8561 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8562 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8563 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8564 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8565 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8566 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
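/*
 * R6 replaces the c.cond.fmt / condition-code scheme with CMP.condn.fmt,
 * which writes its result into an FPU register as an all-ones or all-zeros
 * mask rather than into an FCR31 bit.  The R6_OPC_CMP_* values above reuse
 * FOP() with the FMT_W and FMT_L format fields, which is how the R6
 * encodings distinguish the single- and double-precision compare families.
 */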
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    TCGv t0 = tcg_temp_new();
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);
            TCGv_i32 fs_tmp = tcg_const_i32(fs);

            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
#if defined(TARGET_MIPS64)
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32h(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
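/*
 * gen_cp1() covers the GPR<->FPR and FPU control register move forms
 * (MFC1/MTC1, CFC1/CTC1, and the 64-bit and high-half variants).  CTC1 can
 * change rounding mode and other hflags-related state, so translation is
 * stopped afterwards (BS_STOP) to pick up the new flags.
 */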
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
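/*
 * MOVF/MOVT on GPRs are implemented with a conditional branch rather than
 * a select: the FCR31 condition bit selected by get_fp_bit(cc) is tested
 * and, when it does not call for the move, the tcg_gen_mov_tl() into
 * cpu_gpr[rd] is skipped by branching to l1.
 */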
static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_temp_free_i32(t0);
static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);

        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);
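/*
 * The R6 SEL.fmt / SELEQZ.fmt / SELNEZ.fmt family is branch-free: the
 * select is expressed with tcg_gen_movcond_i32 (or _i64 in gen_sel_d
 * below).  For SEL.fmt the low bit of fd chooses between ft and fs; for
 * SELEQZ/SELNEZ the low bit of ft chooses between fs and zero.
 */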
static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);

        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
static void gen_farith (DisasContext *ctx, enum fopcode op1,
                        int ft, int fs, int fd, int cc)
    uint32_t func = ctx->opcode & 0x3f;
8828 TCGv_i32 fp0
= tcg_temp_new_i32();
8829 TCGv_i32 fp1
= tcg_temp_new_i32();
8831 gen_load_fpr32(ctx
, fp0
, fs
);
8832 gen_load_fpr32(ctx
, fp1
, ft
);
8833 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8834 tcg_temp_free_i32(fp1
);
8835 gen_store_fpr32(ctx
, fp0
, fd
);
8836 tcg_temp_free_i32(fp0
);
8841 TCGv_i32 fp0
= tcg_temp_new_i32();
8842 TCGv_i32 fp1
= tcg_temp_new_i32();
8844 gen_load_fpr32(ctx
, fp0
, fs
);
8845 gen_load_fpr32(ctx
, fp1
, ft
);
8846 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8847 tcg_temp_free_i32(fp1
);
8848 gen_store_fpr32(ctx
, fp0
, fd
);
8849 tcg_temp_free_i32(fp0
);
8854 TCGv_i32 fp0
= tcg_temp_new_i32();
8855 TCGv_i32 fp1
= tcg_temp_new_i32();
8857 gen_load_fpr32(ctx
, fp0
, fs
);
8858 gen_load_fpr32(ctx
, fp1
, ft
);
8859 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8860 tcg_temp_free_i32(fp1
);
8861 gen_store_fpr32(ctx
, fp0
, fd
);
8862 tcg_temp_free_i32(fp0
);
8867 TCGv_i32 fp0
= tcg_temp_new_i32();
8868 TCGv_i32 fp1
= tcg_temp_new_i32();
8870 gen_load_fpr32(ctx
, fp0
, fs
);
8871 gen_load_fpr32(ctx
, fp1
, ft
);
8872 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8873 tcg_temp_free_i32(fp1
);
8874 gen_store_fpr32(ctx
, fp0
, fd
);
8875 tcg_temp_free_i32(fp0
);
8880 TCGv_i32 fp0
= tcg_temp_new_i32();
8882 gen_load_fpr32(ctx
, fp0
, fs
);
8883 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8884 gen_store_fpr32(ctx
, fp0
, fd
);
8885 tcg_temp_free_i32(fp0
);
8890 TCGv_i32 fp0
= tcg_temp_new_i32();
8892 gen_load_fpr32(ctx
, fp0
, fs
);
8893 gen_helper_float_abs_s(fp0
, fp0
);
8894 gen_store_fpr32(ctx
, fp0
, fd
);
8895 tcg_temp_free_i32(fp0
);
8900 TCGv_i32 fp0
= tcg_temp_new_i32();
8902 gen_load_fpr32(ctx
, fp0
, fs
);
8903 gen_store_fpr32(ctx
, fp0
, fd
);
8904 tcg_temp_free_i32(fp0
);
8909 TCGv_i32 fp0
= tcg_temp_new_i32();
8911 gen_load_fpr32(ctx
, fp0
, fs
);
8912 gen_helper_float_chs_s(fp0
, fp0
);
8913 gen_store_fpr32(ctx
, fp0
, fd
);
8914 tcg_temp_free_i32(fp0
);
8918 check_cp1_64bitmode(ctx
);
8920 TCGv_i32 fp32
= tcg_temp_new_i32();
8921 TCGv_i64 fp64
= tcg_temp_new_i64();
8923 gen_load_fpr32(ctx
, fp32
, fs
);
8924 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
8925 tcg_temp_free_i32(fp32
);
8926 gen_store_fpr64(ctx
, fp64
, fd
);
8927 tcg_temp_free_i64(fp64
);
8931 check_cp1_64bitmode(ctx
);
8933 TCGv_i32 fp32
= tcg_temp_new_i32();
8934 TCGv_i64 fp64
= tcg_temp_new_i64();
8936 gen_load_fpr32(ctx
, fp32
, fs
);
8937 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
8938 tcg_temp_free_i32(fp32
);
8939 gen_store_fpr64(ctx
, fp64
, fd
);
8940 tcg_temp_free_i64(fp64
);
8944 check_cp1_64bitmode(ctx
);
8946 TCGv_i32 fp32
= tcg_temp_new_i32();
8947 TCGv_i64 fp64
= tcg_temp_new_i64();
8949 gen_load_fpr32(ctx
, fp32
, fs
);
8950 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
8951 tcg_temp_free_i32(fp32
);
8952 gen_store_fpr64(ctx
, fp64
, fd
);
8953 tcg_temp_free_i64(fp64
);
8957 check_cp1_64bitmode(ctx
);
8959 TCGv_i32 fp32
= tcg_temp_new_i32();
8960 TCGv_i64 fp64
= tcg_temp_new_i64();
8962 gen_load_fpr32(ctx
, fp32
, fs
);
8963 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
8964 tcg_temp_free_i32(fp32
);
8965 gen_store_fpr64(ctx
, fp64
, fd
);
8966 tcg_temp_free_i64(fp64
);
8971 TCGv_i32 fp0
= tcg_temp_new_i32();
8973 gen_load_fpr32(ctx
, fp0
, fs
);
8974 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
8975 gen_store_fpr32(ctx
, fp0
, fd
);
8976 tcg_temp_free_i32(fp0
);
8981 TCGv_i32 fp0
= tcg_temp_new_i32();
8983 gen_load_fpr32(ctx
, fp0
, fs
);
8984 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
8985 gen_store_fpr32(ctx
, fp0
, fd
);
8986 tcg_temp_free_i32(fp0
);
8991 TCGv_i32 fp0
= tcg_temp_new_i32();
8993 gen_load_fpr32(ctx
, fp0
, fs
);
8994 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
8995 gen_store_fpr32(ctx
, fp0
, fd
);
8996 tcg_temp_free_i32(fp0
);
9001 TCGv_i32 fp0
= tcg_temp_new_i32();
9003 gen_load_fpr32(ctx
, fp0
, fs
);
9004 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
9005 gen_store_fpr32(ctx
, fp0
, fd
);
9006 tcg_temp_free_i32(fp0
);
9010 check_insn(ctx
, ISA_MIPS32R6
);
9011 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9014 check_insn(ctx
, ISA_MIPS32R6
);
9015 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9018 check_insn(ctx
, ISA_MIPS32R6
);
9019 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9022 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9023 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9026 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9028 TCGLabel
*l1
= gen_new_label();
9032 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9034 fp0
= tcg_temp_new_i32();
9035 gen_load_fpr32(ctx
, fp0
, fs
);
9036 gen_store_fpr32(ctx
, fp0
, fd
);
9037 tcg_temp_free_i32(fp0
);
9042 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9044 TCGLabel
*l1
= gen_new_label();
9048 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9049 fp0
= tcg_temp_new_i32();
9050 gen_load_fpr32(ctx
, fp0
, fs
);
9051 gen_store_fpr32(ctx
, fp0
, fd
);
9052 tcg_temp_free_i32(fp0
);
9059 TCGv_i32 fp0
= tcg_temp_new_i32();
9061 gen_load_fpr32(ctx
, fp0
, fs
);
9062 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9063 gen_store_fpr32(ctx
, fp0
, fd
);
9064 tcg_temp_free_i32(fp0
);
9069 TCGv_i32 fp0
= tcg_temp_new_i32();
9071 gen_load_fpr32(ctx
, fp0
, fs
);
9072 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9073 gen_store_fpr32(ctx
, fp0
, fd
);
9074 tcg_temp_free_i32(fp0
);
9078 check_insn(ctx
, ISA_MIPS32R6
);
9080 TCGv_i32 fp0
= tcg_temp_new_i32();
9081 TCGv_i32 fp1
= tcg_temp_new_i32();
9082 TCGv_i32 fp2
= tcg_temp_new_i32();
9083 gen_load_fpr32(ctx
, fp0
, fs
);
9084 gen_load_fpr32(ctx
, fp1
, ft
);
9085 gen_load_fpr32(ctx
, fp2
, fd
);
9086 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9087 gen_store_fpr32(ctx
, fp2
, fd
);
9088 tcg_temp_free_i32(fp2
);
9089 tcg_temp_free_i32(fp1
);
9090 tcg_temp_free_i32(fp0
);
9094 check_insn(ctx
, ISA_MIPS32R6
);
9096 TCGv_i32 fp0
= tcg_temp_new_i32();
9097 TCGv_i32 fp1
= tcg_temp_new_i32();
9098 TCGv_i32 fp2
= tcg_temp_new_i32();
9099 gen_load_fpr32(ctx
, fp0
, fs
);
9100 gen_load_fpr32(ctx
, fp1
, ft
);
9101 gen_load_fpr32(ctx
, fp2
, fd
);
9102 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9103 gen_store_fpr32(ctx
, fp2
, fd
);
9104 tcg_temp_free_i32(fp2
);
9105 tcg_temp_free_i32(fp1
);
9106 tcg_temp_free_i32(fp0
);
9110 check_insn(ctx
, ISA_MIPS32R6
);
9112 TCGv_i32 fp0
= tcg_temp_new_i32();
9113 gen_load_fpr32(ctx
, fp0
, fs
);
9114 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9115 gen_store_fpr32(ctx
, fp0
, fd
);
9116 tcg_temp_free_i32(fp0
);
9120 check_insn(ctx
, ISA_MIPS32R6
);
9122 TCGv_i32 fp0
= tcg_temp_new_i32();
9123 gen_load_fpr32(ctx
, fp0
, fs
);
9124 gen_helper_float_class_s(fp0
, fp0
);
9125 gen_store_fpr32(ctx
, fp0
, fd
);
9126 tcg_temp_free_i32(fp0
);
9129 case OPC_MIN_S
: /* OPC_RECIP2_S */
9130 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9132 TCGv_i32 fp0
= tcg_temp_new_i32();
9133 TCGv_i32 fp1
= tcg_temp_new_i32();
9134 TCGv_i32 fp2
= tcg_temp_new_i32();
9135 gen_load_fpr32(ctx
, fp0
, fs
);
9136 gen_load_fpr32(ctx
, fp1
, ft
);
9137 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9138 gen_store_fpr32(ctx
, fp2
, fd
);
9139 tcg_temp_free_i32(fp2
);
9140 tcg_temp_free_i32(fp1
);
9141 tcg_temp_free_i32(fp0
);
9144 check_cp1_64bitmode(ctx
);
9146 TCGv_i32 fp0
= tcg_temp_new_i32();
9147 TCGv_i32 fp1
= tcg_temp_new_i32();
9149 gen_load_fpr32(ctx
, fp0
, fs
);
9150 gen_load_fpr32(ctx
, fp1
, ft
);
9151 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9152 tcg_temp_free_i32(fp1
);
9153 gen_store_fpr32(ctx
, fp0
, fd
);
9154 tcg_temp_free_i32(fp0
);
9158 case OPC_MINA_S
: /* OPC_RECIP1_S */
9159 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9161 TCGv_i32 fp0
= tcg_temp_new_i32();
9162 TCGv_i32 fp1
= tcg_temp_new_i32();
9163 TCGv_i32 fp2
= tcg_temp_new_i32();
9164 gen_load_fpr32(ctx
, fp0
, fs
);
9165 gen_load_fpr32(ctx
, fp1
, ft
);
9166 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9167 gen_store_fpr32(ctx
, fp2
, fd
);
9168 tcg_temp_free_i32(fp2
);
9169 tcg_temp_free_i32(fp1
);
9170 tcg_temp_free_i32(fp0
);
9173 check_cp1_64bitmode(ctx
);
9175 TCGv_i32 fp0
= tcg_temp_new_i32();
9177 gen_load_fpr32(ctx
, fp0
, fs
);
9178 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9179 gen_store_fpr32(ctx
, fp0
, fd
);
9180 tcg_temp_free_i32(fp0
);
9184 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9185 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9187 TCGv_i32 fp0
= tcg_temp_new_i32();
9188 TCGv_i32 fp1
= tcg_temp_new_i32();
9189 gen_load_fpr32(ctx
, fp0
, fs
);
9190 gen_load_fpr32(ctx
, fp1
, ft
);
9191 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9192 gen_store_fpr32(ctx
, fp1
, fd
);
9193 tcg_temp_free_i32(fp1
);
9194 tcg_temp_free_i32(fp0
);
9197 check_cp1_64bitmode(ctx
);
9199 TCGv_i32 fp0
= tcg_temp_new_i32();
9201 gen_load_fpr32(ctx
, fp0
, fs
);
9202 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9203 gen_store_fpr32(ctx
, fp0
, fd
);
9204 tcg_temp_free_i32(fp0
);
9208 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9209 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9211 TCGv_i32 fp0
= tcg_temp_new_i32();
9212 TCGv_i32 fp1
= tcg_temp_new_i32();
9213 gen_load_fpr32(ctx
, fp0
, fs
);
9214 gen_load_fpr32(ctx
, fp1
, ft
);
9215 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9216 gen_store_fpr32(ctx
, fp1
, fd
);
9217 tcg_temp_free_i32(fp1
);
9218 tcg_temp_free_i32(fp0
);
9221 check_cp1_64bitmode(ctx
);
9223 TCGv_i32 fp0
= tcg_temp_new_i32();
9224 TCGv_i32 fp1
= tcg_temp_new_i32();
9226 gen_load_fpr32(ctx
, fp0
, fs
);
9227 gen_load_fpr32(ctx
, fp1
, ft
);
9228 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9229 tcg_temp_free_i32(fp1
);
9230 gen_store_fpr32(ctx
, fp0
, fd
);
9231 tcg_temp_free_i32(fp0
);
9236 check_cp1_registers(ctx
, fd
);
9238 TCGv_i32 fp32
= tcg_temp_new_i32();
9239 TCGv_i64 fp64
= tcg_temp_new_i64();
9241 gen_load_fpr32(ctx
, fp32
, fs
);
9242 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9243 tcg_temp_free_i32(fp32
);
9244 gen_store_fpr64(ctx
, fp64
, fd
);
9245 tcg_temp_free_i64(fp64
);
9250 TCGv_i32 fp0
= tcg_temp_new_i32();
9252 gen_load_fpr32(ctx
, fp0
, fs
);
9253 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
9254 gen_store_fpr32(ctx
, fp0
, fd
);
9255 tcg_temp_free_i32(fp0
);
9259 check_cp1_64bitmode(ctx
);
9261 TCGv_i32 fp32
= tcg_temp_new_i32();
9262 TCGv_i64 fp64
= tcg_temp_new_i64();
9264 gen_load_fpr32(ctx
, fp32
, fs
);
9265 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
9266 tcg_temp_free_i32(fp32
);
9267 gen_store_fpr64(ctx
, fp64
, fd
);
9268 tcg_temp_free_i64(fp64
);
9274 TCGv_i64 fp64
= tcg_temp_new_i64();
9275 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9276 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9278 gen_load_fpr32(ctx
, fp32_0
, fs
);
9279 gen_load_fpr32(ctx
, fp32_1
, ft
);
9280 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9281 tcg_temp_free_i32(fp32_1
);
9282 tcg_temp_free_i32(fp32_0
);
9283 gen_store_fpr64(ctx
, fp64
, fd
);
9284 tcg_temp_free_i64(fp64
);
9296 case OPC_CMP_NGLE_S
:
9303 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9304 if (ctx
->opcode
& (1 << 6)) {
9305 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9307 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9311 check_cp1_registers(ctx
, fs
| ft
| fd
);
9313 TCGv_i64 fp0
= tcg_temp_new_i64();
9314 TCGv_i64 fp1
= tcg_temp_new_i64();
9316 gen_load_fpr64(ctx
, fp0
, fs
);
9317 gen_load_fpr64(ctx
, fp1
, ft
);
9318 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9319 tcg_temp_free_i64(fp1
);
9320 gen_store_fpr64(ctx
, fp0
, fd
);
9321 tcg_temp_free_i64(fp0
);
9325 check_cp1_registers(ctx
, fs
| ft
| fd
);
9327 TCGv_i64 fp0
= tcg_temp_new_i64();
9328 TCGv_i64 fp1
= tcg_temp_new_i64();
9330 gen_load_fpr64(ctx
, fp0
, fs
);
9331 gen_load_fpr64(ctx
, fp1
, ft
);
9332 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9333 tcg_temp_free_i64(fp1
);
9334 gen_store_fpr64(ctx
, fp0
, fd
);
9335 tcg_temp_free_i64(fp0
);
9339 check_cp1_registers(ctx
, fs
| ft
| fd
);
9341 TCGv_i64 fp0
= tcg_temp_new_i64();
9342 TCGv_i64 fp1
= tcg_temp_new_i64();
9344 gen_load_fpr64(ctx
, fp0
, fs
);
9345 gen_load_fpr64(ctx
, fp1
, ft
);
9346 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9347 tcg_temp_free_i64(fp1
);
9348 gen_store_fpr64(ctx
, fp0
, fd
);
9349 tcg_temp_free_i64(fp0
);
9353 check_cp1_registers(ctx
, fs
| ft
| fd
);
9355 TCGv_i64 fp0
= tcg_temp_new_i64();
9356 TCGv_i64 fp1
= tcg_temp_new_i64();
9358 gen_load_fpr64(ctx
, fp0
, fs
);
9359 gen_load_fpr64(ctx
, fp1
, ft
);
9360 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9361 tcg_temp_free_i64(fp1
);
9362 gen_store_fpr64(ctx
, fp0
, fd
);
9363 tcg_temp_free_i64(fp0
);
9367 check_cp1_registers(ctx
, fs
| fd
);
9369 TCGv_i64 fp0
= tcg_temp_new_i64();
9371 gen_load_fpr64(ctx
, fp0
, fs
);
9372 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9373 gen_store_fpr64(ctx
, fp0
, fd
);
9374 tcg_temp_free_i64(fp0
);
9378 check_cp1_registers(ctx
, fs
| fd
);
9380 TCGv_i64 fp0
= tcg_temp_new_i64();
9382 gen_load_fpr64(ctx
, fp0
, fs
);
9383 gen_helper_float_abs_d(fp0
, fp0
);
9384 gen_store_fpr64(ctx
, fp0
, fd
);
9385 tcg_temp_free_i64(fp0
);
9389 check_cp1_registers(ctx
, fs
| fd
);
9391 TCGv_i64 fp0
= tcg_temp_new_i64();
9393 gen_load_fpr64(ctx
, fp0
, fs
);
9394 gen_store_fpr64(ctx
, fp0
, fd
);
9395 tcg_temp_free_i64(fp0
);
9399 check_cp1_registers(ctx
, fs
| fd
);
9401 TCGv_i64 fp0
= tcg_temp_new_i64();
9403 gen_load_fpr64(ctx
, fp0
, fs
);
9404 gen_helper_float_chs_d(fp0
, fp0
);
9405 gen_store_fpr64(ctx
, fp0
, fd
);
9406 tcg_temp_free_i64(fp0
);
9410 check_cp1_64bitmode(ctx
);
9412 TCGv_i64 fp0
= tcg_temp_new_i64();
9414 gen_load_fpr64(ctx
, fp0
, fs
);
9415 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
9416 gen_store_fpr64(ctx
, fp0
, fd
);
9417 tcg_temp_free_i64(fp0
);
9421 check_cp1_64bitmode(ctx
);
9423 TCGv_i64 fp0
= tcg_temp_new_i64();
9425 gen_load_fpr64(ctx
, fp0
, fs
);
9426 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
9427 gen_store_fpr64(ctx
, fp0
, fd
);
9428 tcg_temp_free_i64(fp0
);
9432 check_cp1_64bitmode(ctx
);
9434 TCGv_i64 fp0
= tcg_temp_new_i64();
9436 gen_load_fpr64(ctx
, fp0
, fs
);
9437 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
9438 gen_store_fpr64(ctx
, fp0
, fd
);
9439 tcg_temp_free_i64(fp0
);
9443 check_cp1_64bitmode(ctx
);
9445 TCGv_i64 fp0
= tcg_temp_new_i64();
9447 gen_load_fpr64(ctx
, fp0
, fs
);
9448 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
9449 gen_store_fpr64(ctx
, fp0
, fd
);
9450 tcg_temp_free_i64(fp0
);
9454 check_cp1_registers(ctx
, fs
);
9456 TCGv_i32 fp32
= tcg_temp_new_i32();
9457 TCGv_i64 fp64
= tcg_temp_new_i64();
9459 gen_load_fpr64(ctx
, fp64
, fs
);
9460 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
9461 tcg_temp_free_i64(fp64
);
9462 gen_store_fpr32(ctx
, fp32
, fd
);
9463 tcg_temp_free_i32(fp32
);
9467 check_cp1_registers(ctx
, fs
);
9469 TCGv_i32 fp32
= tcg_temp_new_i32();
9470 TCGv_i64 fp64
= tcg_temp_new_i64();
9472 gen_load_fpr64(ctx
, fp64
, fs
);
9473 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
9474 tcg_temp_free_i64(fp64
);
9475 gen_store_fpr32(ctx
, fp32
, fd
);
9476 tcg_temp_free_i32(fp32
);
9480 check_cp1_registers(ctx
, fs
);
9482 TCGv_i32 fp32
= tcg_temp_new_i32();
9483 TCGv_i64 fp64
= tcg_temp_new_i64();
9485 gen_load_fpr64(ctx
, fp64
, fs
);
9486 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
9487 tcg_temp_free_i64(fp64
);
9488 gen_store_fpr32(ctx
, fp32
, fd
);
9489 tcg_temp_free_i32(fp32
);
9493 check_cp1_registers(ctx
, fs
);
9495 TCGv_i32 fp32
= tcg_temp_new_i32();
9496 TCGv_i64 fp64
= tcg_temp_new_i64();
9498 gen_load_fpr64(ctx
, fp64
, fs
);
9499 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
9500 tcg_temp_free_i64(fp64
);
9501 gen_store_fpr32(ctx
, fp32
, fd
);
9502 tcg_temp_free_i32(fp32
);
9506 check_insn(ctx
, ISA_MIPS32R6
);
9507 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9510 check_insn(ctx
, ISA_MIPS32R6
);
9511 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9514 check_insn(ctx
, ISA_MIPS32R6
);
9515 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9518 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9519 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9522 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9524 TCGLabel
*l1
= gen_new_label();
9528 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9530 fp0
= tcg_temp_new_i64();
9531 gen_load_fpr64(ctx
, fp0
, fs
);
9532 gen_store_fpr64(ctx
, fp0
, fd
);
9533 tcg_temp_free_i64(fp0
);
9538 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9540 TCGLabel
*l1
= gen_new_label();
9544 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9545 fp0
= tcg_temp_new_i64();
9546 gen_load_fpr64(ctx
, fp0
, fs
);
9547 gen_store_fpr64(ctx
, fp0
, fd
);
9548 tcg_temp_free_i64(fp0
);
9554 check_cp1_registers(ctx
, fs
| fd
);
9556 TCGv_i64 fp0
= tcg_temp_new_i64();
9558 gen_load_fpr64(ctx
, fp0
, fs
);
9559 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9560 gen_store_fpr64(ctx
, fp0
, fd
);
9561 tcg_temp_free_i64(fp0
);
9565 check_cp1_registers(ctx
, fs
| fd
);
9567 TCGv_i64 fp0
= tcg_temp_new_i64();
9569 gen_load_fpr64(ctx
, fp0
, fs
);
9570 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9571 gen_store_fpr64(ctx
, fp0
, fd
);
9572 tcg_temp_free_i64(fp0
);
9576 check_insn(ctx
, ISA_MIPS32R6
);
9578 TCGv_i64 fp0
= tcg_temp_new_i64();
9579 TCGv_i64 fp1
= tcg_temp_new_i64();
9580 TCGv_i64 fp2
= tcg_temp_new_i64();
9581 gen_load_fpr64(ctx
, fp0
, fs
);
9582 gen_load_fpr64(ctx
, fp1
, ft
);
9583 gen_load_fpr64(ctx
, fp2
, fd
);
9584 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9585 gen_store_fpr64(ctx
, fp2
, fd
);
9586 tcg_temp_free_i64(fp2
);
9587 tcg_temp_free_i64(fp1
);
9588 tcg_temp_free_i64(fp0
);
9592 check_insn(ctx
, ISA_MIPS32R6
);
9594 TCGv_i64 fp0
= tcg_temp_new_i64();
9595 TCGv_i64 fp1
= tcg_temp_new_i64();
9596 TCGv_i64 fp2
= tcg_temp_new_i64();
9597 gen_load_fpr64(ctx
, fp0
, fs
);
9598 gen_load_fpr64(ctx
, fp1
, ft
);
9599 gen_load_fpr64(ctx
, fp2
, fd
);
9600 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9601 gen_store_fpr64(ctx
, fp2
, fd
);
9602 tcg_temp_free_i64(fp2
);
9603 tcg_temp_free_i64(fp1
);
9604 tcg_temp_free_i64(fp0
);
9608 check_insn(ctx
, ISA_MIPS32R6
);
9610 TCGv_i64 fp0
= tcg_temp_new_i64();
9611 gen_load_fpr64(ctx
, fp0
, fs
);
9612 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9613 gen_store_fpr64(ctx
, fp0
, fd
);
9614 tcg_temp_free_i64(fp0
);
9618 check_insn(ctx
, ISA_MIPS32R6
);
9620 TCGv_i64 fp0
= tcg_temp_new_i64();
9621 gen_load_fpr64(ctx
, fp0
, fs
);
9622 gen_helper_float_class_d(fp0
, fp0
);
9623 gen_store_fpr64(ctx
, fp0
, fd
);
9624 tcg_temp_free_i64(fp0
);
9627 case OPC_MIN_D
: /* OPC_RECIP2_D */
9628 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9630 TCGv_i64 fp0
= tcg_temp_new_i64();
9631 TCGv_i64 fp1
= tcg_temp_new_i64();
9632 gen_load_fpr64(ctx
, fp0
, fs
);
9633 gen_load_fpr64(ctx
, fp1
, ft
);
9634 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9635 gen_store_fpr64(ctx
, fp1
, fd
);
9636 tcg_temp_free_i64(fp1
);
9637 tcg_temp_free_i64(fp0
);
9640 check_cp1_64bitmode(ctx
);
9642 TCGv_i64 fp0
= tcg_temp_new_i64();
9643 TCGv_i64 fp1
= tcg_temp_new_i64();
9645 gen_load_fpr64(ctx
, fp0
, fs
);
9646 gen_load_fpr64(ctx
, fp1
, ft
);
9647 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9648 tcg_temp_free_i64(fp1
);
9649 gen_store_fpr64(ctx
, fp0
, fd
);
9650 tcg_temp_free_i64(fp0
);
9654 case OPC_MINA_D
: /* OPC_RECIP1_D */
9655 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9657 TCGv_i64 fp0
= tcg_temp_new_i64();
9658 TCGv_i64 fp1
= tcg_temp_new_i64();
9659 gen_load_fpr64(ctx
, fp0
, fs
);
9660 gen_load_fpr64(ctx
, fp1
, ft
);
9661 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9662 gen_store_fpr64(ctx
, fp1
, fd
);
9663 tcg_temp_free_i64(fp1
);
9664 tcg_temp_free_i64(fp0
);
9667 check_cp1_64bitmode(ctx
);
9669 TCGv_i64 fp0
= tcg_temp_new_i64();
9671 gen_load_fpr64(ctx
, fp0
, fs
);
9672 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9673 gen_store_fpr64(ctx
, fp0
, fd
);
9674 tcg_temp_free_i64(fp0
);
9678 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9679 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9681 TCGv_i64 fp0
= tcg_temp_new_i64();
9682 TCGv_i64 fp1
= tcg_temp_new_i64();
9683 gen_load_fpr64(ctx
, fp0
, fs
);
9684 gen_load_fpr64(ctx
, fp1
, ft
);
9685 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9686 gen_store_fpr64(ctx
, fp1
, fd
);
9687 tcg_temp_free_i64(fp1
);
9688 tcg_temp_free_i64(fp0
);
9691 check_cp1_64bitmode(ctx
);
9693 TCGv_i64 fp0
= tcg_temp_new_i64();
9695 gen_load_fpr64(ctx
, fp0
, fs
);
9696 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9697 gen_store_fpr64(ctx
, fp0
, fd
);
9698 tcg_temp_free_i64(fp0
);
9702 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9703 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9705 TCGv_i64 fp0
= tcg_temp_new_i64();
9706 TCGv_i64 fp1
= tcg_temp_new_i64();
9707 gen_load_fpr64(ctx
, fp0
, fs
);
9708 gen_load_fpr64(ctx
, fp1
, ft
);
9709 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9710 gen_store_fpr64(ctx
, fp1
, fd
);
9711 tcg_temp_free_i64(fp1
);
9712 tcg_temp_free_i64(fp0
);
9715 check_cp1_64bitmode(ctx
);
9717 TCGv_i64 fp0
= tcg_temp_new_i64();
9718 TCGv_i64 fp1
= tcg_temp_new_i64();
9720 gen_load_fpr64(ctx
, fp0
, fs
);
9721 gen_load_fpr64(ctx
, fp1
, ft
);
9722 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9723 tcg_temp_free_i64(fp1
);
9724 gen_store_fpr64(ctx
, fp0
, fd
);
9725 tcg_temp_free_i64(fp0
);
9738 case OPC_CMP_NGLE_D
:
9745 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9746 if (ctx
->opcode
& (1 << 6)) {
9747 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9749 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9753 check_cp1_registers(ctx
, fs
);
9755 TCGv_i32 fp32
= tcg_temp_new_i32();
9756 TCGv_i64 fp64
= tcg_temp_new_i64();
9758 gen_load_fpr64(ctx
, fp64
, fs
);
9759 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9760 tcg_temp_free_i64(fp64
);
9761 gen_store_fpr32(ctx
, fp32
, fd
);
9762 tcg_temp_free_i32(fp32
);
9766 check_cp1_registers(ctx
, fs
);
9768 TCGv_i32 fp32
= tcg_temp_new_i32();
9769 TCGv_i64 fp64
= tcg_temp_new_i64();
9771 gen_load_fpr64(ctx
, fp64
, fs
);
9772 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
9773 tcg_temp_free_i64(fp64
);
9774 gen_store_fpr32(ctx
, fp32
, fd
);
9775 tcg_temp_free_i32(fp32
);
9779 check_cp1_64bitmode(ctx
);
9781 TCGv_i64 fp0
= tcg_temp_new_i64();
9783 gen_load_fpr64(ctx
, fp0
, fs
);
9784 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
9785 gen_store_fpr64(ctx
, fp0
, fd
);
9786 tcg_temp_free_i64(fp0
);
9791 TCGv_i32 fp0
= tcg_temp_new_i32();
9793 gen_load_fpr32(ctx
, fp0
, fs
);
9794 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9795 gen_store_fpr32(ctx
, fp0
, fd
);
9796 tcg_temp_free_i32(fp0
);
9800 check_cp1_registers(ctx
, fd
);
9802 TCGv_i32 fp32
= tcg_temp_new_i32();
9803 TCGv_i64 fp64
= tcg_temp_new_i64();
9805 gen_load_fpr32(ctx
, fp32
, fs
);
9806 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9807 tcg_temp_free_i32(fp32
);
9808 gen_store_fpr64(ctx
, fp64
, fd
);
9809 tcg_temp_free_i64(fp64
);
9813 check_cp1_64bitmode(ctx
);
9815 TCGv_i32 fp32
= tcg_temp_new_i32();
9816 TCGv_i64 fp64
= tcg_temp_new_i64();
9818 gen_load_fpr64(ctx
, fp64
, fs
);
9819 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9820 tcg_temp_free_i64(fp64
);
9821 gen_store_fpr32(ctx
, fp32
, fd
);
9822 tcg_temp_free_i32(fp32
);
9826 check_cp1_64bitmode(ctx
);
9828 TCGv_i64 fp0
= tcg_temp_new_i64();
9830 gen_load_fpr64(ctx
, fp0
, fs
);
9831 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9832 gen_store_fpr64(ctx
, fp0
, fd
);
9833 tcg_temp_free_i64(fp0
);
9839 TCGv_i64 fp0
= tcg_temp_new_i64();
9841 gen_load_fpr64(ctx
, fp0
, fs
);
9842 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9843 gen_store_fpr64(ctx
, fp0
, fd
);
9844 tcg_temp_free_i64(fp0
);
9850 TCGv_i64 fp0
= tcg_temp_new_i64();
9851 TCGv_i64 fp1
= tcg_temp_new_i64();
9853 gen_load_fpr64(ctx
, fp0
, fs
);
9854 gen_load_fpr64(ctx
, fp1
, ft
);
9855 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9856 tcg_temp_free_i64(fp1
);
9857 gen_store_fpr64(ctx
, fp0
, fd
);
9858 tcg_temp_free_i64(fp0
);
9864 TCGv_i64 fp0
= tcg_temp_new_i64();
9865 TCGv_i64 fp1
= tcg_temp_new_i64();
9867 gen_load_fpr64(ctx
, fp0
, fs
);
9868 gen_load_fpr64(ctx
, fp1
, ft
);
9869 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9870 tcg_temp_free_i64(fp1
);
9871 gen_store_fpr64(ctx
, fp0
, fd
);
9872 tcg_temp_free_i64(fp0
);
9878 TCGv_i64 fp0
= tcg_temp_new_i64();
9879 TCGv_i64 fp1
= tcg_temp_new_i64();
9881 gen_load_fpr64(ctx
, fp0
, fs
);
9882 gen_load_fpr64(ctx
, fp1
, ft
);
9883 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9884 tcg_temp_free_i64(fp1
);
9885 gen_store_fpr64(ctx
, fp0
, fd
);
9886 tcg_temp_free_i64(fp0
);
9892 TCGv_i64 fp0
= tcg_temp_new_i64();
9894 gen_load_fpr64(ctx
, fp0
, fs
);
9895 gen_helper_float_abs_ps(fp0
, fp0
);
9896 gen_store_fpr64(ctx
, fp0
, fd
);
9897 tcg_temp_free_i64(fp0
);
9903 TCGv_i64 fp0
= tcg_temp_new_i64();
9905 gen_load_fpr64(ctx
, fp0
, fs
);
9906 gen_store_fpr64(ctx
, fp0
, fd
);
9907 tcg_temp_free_i64(fp0
);
9913 TCGv_i64 fp0
= tcg_temp_new_i64();
9915 gen_load_fpr64(ctx
, fp0
, fs
);
9916 gen_helper_float_chs_ps(fp0
, fp0
);
9917 gen_store_fpr64(ctx
, fp0
, fd
);
9918 tcg_temp_free_i64(fp0
);
9923 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9928 TCGLabel
*l1
= gen_new_label();
9932 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9933 fp0
= tcg_temp_new_i64();
9934 gen_load_fpr64(ctx
, fp0
, fs
);
9935 gen_store_fpr64(ctx
, fp0
, fd
);
9936 tcg_temp_free_i64(fp0
);
9943 TCGLabel
*l1
= gen_new_label();
9947 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9948 fp0
= tcg_temp_new_i64();
9949 gen_load_fpr64(ctx
, fp0
, fs
);
9950 gen_store_fpr64(ctx
, fp0
, fd
);
9951 tcg_temp_free_i64(fp0
);
9959 TCGv_i64 fp0
= tcg_temp_new_i64();
9960 TCGv_i64 fp1
= tcg_temp_new_i64();
9962 gen_load_fpr64(ctx
, fp0
, ft
);
9963 gen_load_fpr64(ctx
, fp1
, fs
);
9964 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
9965 tcg_temp_free_i64(fp1
);
9966 gen_store_fpr64(ctx
, fp0
, fd
);
9967 tcg_temp_free_i64(fp0
);
9973 TCGv_i64 fp0
= tcg_temp_new_i64();
9974 TCGv_i64 fp1
= tcg_temp_new_i64();
9976 gen_load_fpr64(ctx
, fp0
, ft
);
9977 gen_load_fpr64(ctx
, fp1
, fs
);
9978 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
9979 tcg_temp_free_i64(fp1
);
9980 gen_store_fpr64(ctx
, fp0
, fd
);
9981 tcg_temp_free_i64(fp0
);
9987 TCGv_i64 fp0
= tcg_temp_new_i64();
9988 TCGv_i64 fp1
= tcg_temp_new_i64();
9990 gen_load_fpr64(ctx
, fp0
, fs
);
9991 gen_load_fpr64(ctx
, fp1
, ft
);
9992 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
9993 tcg_temp_free_i64(fp1
);
9994 gen_store_fpr64(ctx
, fp0
, fd
);
9995 tcg_temp_free_i64(fp0
);
10001 TCGv_i64 fp0
= tcg_temp_new_i64();
10003 gen_load_fpr64(ctx
, fp0
, fs
);
10004 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10005 gen_store_fpr64(ctx
, fp0
, fd
);
10006 tcg_temp_free_i64(fp0
);
10009 case OPC_RSQRT1_PS
:
10012 TCGv_i64 fp0
= tcg_temp_new_i64();
10014 gen_load_fpr64(ctx
, fp0
, fs
);
10015 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10016 gen_store_fpr64(ctx
, fp0
, fd
);
10017 tcg_temp_free_i64(fp0
);
10020 case OPC_RSQRT2_PS
:
10023 TCGv_i64 fp0
= tcg_temp_new_i64();
10024 TCGv_i64 fp1
= tcg_temp_new_i64();
10026 gen_load_fpr64(ctx
, fp0
, fs
);
10027 gen_load_fpr64(ctx
, fp1
, ft
);
10028 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10029 tcg_temp_free_i64(fp1
);
10030 gen_store_fpr64(ctx
, fp0
, fd
);
10031 tcg_temp_free_i64(fp0
);
10035 check_cp1_64bitmode(ctx
);
10037 TCGv_i32 fp0
= tcg_temp_new_i32();
10039 gen_load_fpr32h(ctx
, fp0
, fs
);
10040 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10041 gen_store_fpr32(ctx
, fp0
, fd
);
10042 tcg_temp_free_i32(fp0
);
10045 case OPC_CVT_PW_PS
:
10048 TCGv_i64 fp0
= tcg_temp_new_i64();
10050 gen_load_fpr64(ctx
, fp0
, fs
);
10051 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10052 gen_store_fpr64(ctx
, fp0
, fd
);
10053 tcg_temp_free_i64(fp0
);
10057 check_cp1_64bitmode(ctx
);
10059 TCGv_i32 fp0
= tcg_temp_new_i32();
10061 gen_load_fpr32(ctx
, fp0
, fs
);
10062 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10063 gen_store_fpr32(ctx
, fp0
, fd
);
10064 tcg_temp_free_i32(fp0
);
10070 TCGv_i32 fp0
= tcg_temp_new_i32();
10071 TCGv_i32 fp1
= tcg_temp_new_i32();
10073 gen_load_fpr32(ctx
, fp0
, fs
);
10074 gen_load_fpr32(ctx
, fp1
, ft
);
10075 gen_store_fpr32h(ctx
, fp0
, fd
);
10076 gen_store_fpr32(ctx
, fp1
, fd
);
10077 tcg_temp_free_i32(fp0
);
10078 tcg_temp_free_i32(fp1
);
10084 TCGv_i32 fp0
= tcg_temp_new_i32();
10085 TCGv_i32 fp1
= tcg_temp_new_i32();
10087 gen_load_fpr32(ctx
, fp0
, fs
);
10088 gen_load_fpr32h(ctx
, fp1
, ft
);
10089 gen_store_fpr32(ctx
, fp1
, fd
);
10090 gen_store_fpr32h(ctx
, fp0
, fd
);
10091 tcg_temp_free_i32(fp0
);
10092 tcg_temp_free_i32(fp1
);
10098 TCGv_i32 fp0
= tcg_temp_new_i32();
10099 TCGv_i32 fp1
= tcg_temp_new_i32();
10101 gen_load_fpr32h(ctx
, fp0
, fs
);
10102 gen_load_fpr32(ctx
, fp1
, ft
);
10103 gen_store_fpr32(ctx
, fp1
, fd
);
10104 gen_store_fpr32h(ctx
, fp0
, fd
);
10105 tcg_temp_free_i32(fp0
);
10106 tcg_temp_free_i32(fp1
);
10112 TCGv_i32 fp0
= tcg_temp_new_i32();
10113 TCGv_i32 fp1
= tcg_temp_new_i32();
10115 gen_load_fpr32h(ctx
, fp0
, fs
);
10116 gen_load_fpr32h(ctx
, fp1
, ft
);
10117 gen_store_fpr32(ctx
, fp1
, fd
);
10118 gen_store_fpr32h(ctx
, fp0
, fd
);
10119 tcg_temp_free_i32(fp0
);
10120 tcg_temp_free_i32(fp1
);
10124 case OPC_CMP_UN_PS
:
10125 case OPC_CMP_EQ_PS
:
10126 case OPC_CMP_UEQ_PS
:
10127 case OPC_CMP_OLT_PS
:
10128 case OPC_CMP_ULT_PS
:
10129 case OPC_CMP_OLE_PS
:
10130 case OPC_CMP_ULE_PS
:
10131 case OPC_CMP_SF_PS
:
10132 case OPC_CMP_NGLE_PS
:
10133 case OPC_CMP_SEQ_PS
:
10134 case OPC_CMP_NGL_PS
:
10135 case OPC_CMP_LT_PS
:
10136 case OPC_CMP_NGE_PS
:
10137 case OPC_CMP_LE_PS
:
10138 case OPC_CMP_NGT_PS
:
10139 if (ctx
->opcode
& (1 << 6)) {
10140 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10142 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10146 MIPS_INVAL("farith");
10147 generate_exception_end(ctx
, EXCP_RI
);
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
    TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    /* Don't do NOP if destination is zero: we must perform the actual
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
            TCGv_i64 fp0 = tcg_temp_new_i64();

            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
            tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fs);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
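/*
 * gen_flt3_ldst() handles the COP1X indexed FP loads and stores (LWXC1,
 * LDXC1, LUXC1, SWXC1, SDXC1, SUXC1).  The effective address is base+index
 * computed with gen_op_addr_add(), with the degenerate cases where one of
 * the two registers is zero reading the other register directly.  The
 * unaligned LUXC1/SUXC1 forms mask the address down to an 8-byte boundary
 * (the andi with ~0x7) before performing the 64-bit access.
 */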
10233 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10234 int fd
, int fr
, int fs
, int ft
)
10240 TCGv t0
= tcg_temp_local_new();
10241 TCGv_i32 fp
= tcg_temp_new_i32();
10242 TCGv_i32 fph
= tcg_temp_new_i32();
10243 TCGLabel
*l1
= gen_new_label();
10244 TCGLabel
*l2
= gen_new_label();
10246 gen_load_gpr(t0
, fr
);
10247 tcg_gen_andi_tl(t0
, t0
, 0x7);
10249 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10250 gen_load_fpr32(ctx
, fp
, fs
);
10251 gen_load_fpr32h(ctx
, fph
, fs
);
10252 gen_store_fpr32(ctx
, fp
, fd
);
10253 gen_store_fpr32h(ctx
, fph
, fd
);
10256 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10258 #ifdef TARGET_WORDS_BIGENDIAN
10259 gen_load_fpr32(ctx
, fp
, fs
);
10260 gen_load_fpr32h(ctx
, fph
, ft
);
10261 gen_store_fpr32h(ctx
, fp
, fd
);
10262 gen_store_fpr32(ctx
, fph
, fd
);
10264 gen_load_fpr32h(ctx
, fph
, fs
);
10265 gen_load_fpr32(ctx
, fp
, ft
);
10266 gen_store_fpr32(ctx
, fph
, fd
);
10267 gen_store_fpr32h(ctx
, fp
, fd
);
10270 tcg_temp_free_i32(fp
);
10271 tcg_temp_free_i32(fph
);
10277 TCGv_i32 fp0
= tcg_temp_new_i32();
10278 TCGv_i32 fp1
= tcg_temp_new_i32();
10279 TCGv_i32 fp2
= tcg_temp_new_i32();
10281 gen_load_fpr32(ctx
, fp0
, fs
);
10282 gen_load_fpr32(ctx
, fp1
, ft
);
10283 gen_load_fpr32(ctx
, fp2
, fr
);
10284 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10285 tcg_temp_free_i32(fp0
);
10286 tcg_temp_free_i32(fp1
);
10287 gen_store_fpr32(ctx
, fp2
, fd
);
10288 tcg_temp_free_i32(fp2
);
10293 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10295 TCGv_i64 fp0
= tcg_temp_new_i64();
10296 TCGv_i64 fp1
= tcg_temp_new_i64();
10297 TCGv_i64 fp2
= tcg_temp_new_i64();
10299 gen_load_fpr64(ctx
, fp0
, fs
);
10300 gen_load_fpr64(ctx
, fp1
, ft
);
10301 gen_load_fpr64(ctx
, fp2
, fr
);
10302 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10303 tcg_temp_free_i64(fp0
);
10304 tcg_temp_free_i64(fp1
);
10305 gen_store_fpr64(ctx
, fp2
, fd
);
10306 tcg_temp_free_i64(fp2
);
10312 TCGv_i64 fp0
= tcg_temp_new_i64();
10313 TCGv_i64 fp1
= tcg_temp_new_i64();
10314 TCGv_i64 fp2
= tcg_temp_new_i64();
10316 gen_load_fpr64(ctx
, fp0
, fs
);
10317 gen_load_fpr64(ctx
, fp1
, ft
);
10318 gen_load_fpr64(ctx
, fp2
, fr
);
10319 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10320 tcg_temp_free_i64(fp0
);
10321 tcg_temp_free_i64(fp1
);
10322 gen_store_fpr64(ctx
, fp2
, fd
);
10323 tcg_temp_free_i64(fp2
);
10329 TCGv_i32 fp0
= tcg_temp_new_i32();
10330 TCGv_i32 fp1
= tcg_temp_new_i32();
10331 TCGv_i32 fp2
= tcg_temp_new_i32();
10333 gen_load_fpr32(ctx
, fp0
, fs
);
10334 gen_load_fpr32(ctx
, fp1
, ft
);
10335 gen_load_fpr32(ctx
, fp2
, fr
);
10336 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10337 tcg_temp_free_i32(fp0
);
10338 tcg_temp_free_i32(fp1
);
10339 gen_store_fpr32(ctx
, fp2
, fd
);
10340 tcg_temp_free_i32(fp2
);
10345 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10347 TCGv_i64 fp0
= tcg_temp_new_i64();
10348 TCGv_i64 fp1
= tcg_temp_new_i64();
10349 TCGv_i64 fp2
= tcg_temp_new_i64();
10351 gen_load_fpr64(ctx
, fp0
, fs
);
10352 gen_load_fpr64(ctx
, fp1
, ft
);
10353 gen_load_fpr64(ctx
, fp2
, fr
);
10354 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10355 tcg_temp_free_i64(fp0
);
10356 tcg_temp_free_i64(fp1
);
10357 gen_store_fpr64(ctx
, fp2
, fd
);
10358 tcg_temp_free_i64(fp2
);
10364 TCGv_i64 fp0
= tcg_temp_new_i64();
10365 TCGv_i64 fp1
= tcg_temp_new_i64();
10366 TCGv_i64 fp2
= tcg_temp_new_i64();
10368 gen_load_fpr64(ctx
, fp0
, fs
);
10369 gen_load_fpr64(ctx
, fp1
, ft
);
10370 gen_load_fpr64(ctx
, fp2
, fr
);
10371 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10372 tcg_temp_free_i64(fp0
);
10373 tcg_temp_free_i64(fp1
);
10374 gen_store_fpr64(ctx
, fp2
, fd
);
10375 tcg_temp_free_i64(fp2
);
10381 TCGv_i32 fp0
= tcg_temp_new_i32();
10382 TCGv_i32 fp1
= tcg_temp_new_i32();
10383 TCGv_i32 fp2
= tcg_temp_new_i32();
10385 gen_load_fpr32(ctx
, fp0
, fs
);
10386 gen_load_fpr32(ctx
, fp1
, ft
);
10387 gen_load_fpr32(ctx
, fp2
, fr
);
10388 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10389 tcg_temp_free_i32(fp0
);
10390 tcg_temp_free_i32(fp1
);
10391 gen_store_fpr32(ctx
, fp2
, fd
);
10392 tcg_temp_free_i32(fp2
);
10397 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10399 TCGv_i64 fp0
= tcg_temp_new_i64();
10400 TCGv_i64 fp1
= tcg_temp_new_i64();
10401 TCGv_i64 fp2
= tcg_temp_new_i64();
10403 gen_load_fpr64(ctx
, fp0
, fs
);
10404 gen_load_fpr64(ctx
, fp1
, ft
);
10405 gen_load_fpr64(ctx
, fp2
, fr
);
10406 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10407 tcg_temp_free_i64(fp0
);
10408 tcg_temp_free_i64(fp1
);
10409 gen_store_fpr64(ctx
, fp2
, fd
);
10410 tcg_temp_free_i64(fp2
);
10416 TCGv_i64 fp0
= tcg_temp_new_i64();
10417 TCGv_i64 fp1
= tcg_temp_new_i64();
10418 TCGv_i64 fp2
= tcg_temp_new_i64();
10420 gen_load_fpr64(ctx
, fp0
, fs
);
10421 gen_load_fpr64(ctx
, fp1
, ft
);
10422 gen_load_fpr64(ctx
, fp2
, fr
);
10423 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10424 tcg_temp_free_i64(fp0
);
10425 tcg_temp_free_i64(fp1
);
10426 gen_store_fpr64(ctx
, fp2
, fd
);
10427 tcg_temp_free_i64(fp2
);
10433 TCGv_i32 fp0
= tcg_temp_new_i32();
10434 TCGv_i32 fp1
= tcg_temp_new_i32();
10435 TCGv_i32 fp2
= tcg_temp_new_i32();
10437 gen_load_fpr32(ctx
, fp0
, fs
);
10438 gen_load_fpr32(ctx
, fp1
, ft
);
10439 gen_load_fpr32(ctx
, fp2
, fr
);
10440 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10441 tcg_temp_free_i32(fp0
);
10442 tcg_temp_free_i32(fp1
);
10443 gen_store_fpr32(ctx
, fp2
, fd
);
10444 tcg_temp_free_i32(fp2
);
10449 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10451 TCGv_i64 fp0
= tcg_temp_new_i64();
10452 TCGv_i64 fp1
= tcg_temp_new_i64();
10453 TCGv_i64 fp2
= tcg_temp_new_i64();
10455 gen_load_fpr64(ctx
, fp0
, fs
);
10456 gen_load_fpr64(ctx
, fp1
, ft
);
10457 gen_load_fpr64(ctx
, fp2
, fr
);
10458 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10459 tcg_temp_free_i64(fp0
);
10460 tcg_temp_free_i64(fp1
);
10461 gen_store_fpr64(ctx
, fp2
, fd
);
10462 tcg_temp_free_i64(fp2
);
10468 TCGv_i64 fp0
= tcg_temp_new_i64();
10469 TCGv_i64 fp1
= tcg_temp_new_i64();
10470 TCGv_i64 fp2
= tcg_temp_new_i64();
10472 gen_load_fpr64(ctx
, fp0
, fs
);
10473 gen_load_fpr64(ctx
, fp1
, ft
);
10474 gen_load_fpr64(ctx
, fp2
, fr
);
10475 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10476 tcg_temp_free_i64(fp0
);
10477 tcg_temp_free_i64(fp1
);
10478 gen_store_fpr64(ctx
, fp2
, fd
);
10479 tcg_temp_free_i64(fp2
);
10483 MIPS_INVAL("flt3_arith");
10484 generate_exception_end(ctx
, EXCP_RI
);
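/*
 * RDHWR: expose selected hardware registers (CPU number, SYNCI step,
 * cycle counter and its resolution, performance counter 0, XNP and
 * UserLocal) to software through a GPR destination.
 */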
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
{
    TCGv t0;

#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode.  */
    check_insn(ctx, ISA_MIPS32R2);
#endif
    t0 = tcg_temp_new();

    switch (rd) {
    case 0:
        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 1:
        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 2:
        gen_helper_rdhwr_cc(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 3:
        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 4:
        check_insn(ctx, ISA_MIPS32R6);
        if (sel != 0) {
            /* Performance counter registers are not implemented other than
             * control register 0.
             */
            generate_exception(ctx, EXCP_RI);
        }
        gen_helper_rdhwr_performance(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 5:
        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 29:
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
        break;
#else
        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free(t0);
}
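/*
 * Branch completion: once the instruction in a delay or forbidden slot
 * has been translated, the pending branch state is cleared from
 * ctx->hflags (and from the run-time hflags when they may already have
 * been written back) and the actual control transfer is emitted for the
 * recorded branch type.
 */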
static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
    } else {
        /* It is not safe to save ctx->hflags, as hflags may be changed
           at execution time by the instruction in the delay / forbidden
           slot. */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}

static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io.  */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->pc + insn_bytes);
            break;
        case MIPS_HFLAG_B:
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* blikely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            {
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_temp_free(t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_exit_tb(0);
            break;
        default:
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
            abort();
        }
    }
}
/* Compact Branches */
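/*
 * R6 compact branches and jumps have no delay slot: a taken branch
 * transfers control immediately, while a not-taken conditional compact
 * branch instead has a forbidden slot (MIPS_HFLAG_FBNSLOT) in which a
 * branch or jump must not appear; a compact branch found in a delay or
 * forbidden slot raises a Reserved Instruction exception below.
 */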
10636 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10637 int rs
, int rt
, int32_t offset
)
10639 int bcond_compute
= 0;
10640 TCGv t0
= tcg_temp_new();
10641 TCGv t1
= tcg_temp_new();
10642 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10644 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10645 #ifdef MIPS_DEBUG_DISAS
10646 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10649 generate_exception_end(ctx
, EXCP_RI
);
10653 /* Load needed operands and calculate btarget */
10655 /* compact branch */
10656 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10657 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10658 gen_load_gpr(t0
, rs
);
10659 gen_load_gpr(t1
, rt
);
10661 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10662 if (rs
<= rt
&& rs
== 0) {
10663 /* OPC_BEQZALC, OPC_BNEZALC */
10664 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10667 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10668 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10669 gen_load_gpr(t0
, rs
);
10670 gen_load_gpr(t1
, rt
);
10672 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10674 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10675 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10676 if (rs
== 0 || rs
== rt
) {
10677 /* OPC_BLEZALC, OPC_BGEZALC */
10678 /* OPC_BGTZALC, OPC_BLTZALC */
10679 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10681 gen_load_gpr(t0
, rs
);
10682 gen_load_gpr(t1
, rt
);
10684 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10688 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10693 /* OPC_BEQZC, OPC_BNEZC */
10694 gen_load_gpr(t0
, rs
);
10696 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10698 /* OPC_JIC, OPC_JIALC */
10699 TCGv tbase
= tcg_temp_new();
10700 TCGv toffset
= tcg_temp_new();
10702 gen_load_gpr(tbase
, rt
);
10703 tcg_gen_movi_tl(toffset
, offset
);
10704 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10705 tcg_temp_free(tbase
);
10706 tcg_temp_free(toffset
);
10710 MIPS_INVAL("Compact branch/jump");
10711 generate_exception_end(ctx
, EXCP_RI
);
10715 if (bcond_compute
== 0) {
10716 /* Uncoditional compact branch */
10719 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10722 ctx
->hflags
|= MIPS_HFLAG_BR
;
10725 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10728 ctx
->hflags
|= MIPS_HFLAG_B
;
10731 MIPS_INVAL("Compact branch/jump");
10732 generate_exception_end(ctx
, EXCP_RI
);
10736 /* Generating branch here as compact branches don't have delay slot */
10737 gen_branch(ctx
, 4);
10739 /* Conditional compact branch */
10740 TCGLabel
*fs
= gen_new_label();
10741 save_cpu_state(ctx
, 0);
10744 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10745 if (rs
== 0 && rt
!= 0) {
10747 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10748 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10750 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10753 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10756 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10757 if (rs
== 0 && rt
!= 0) {
10759 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10760 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10762 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10765 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10768 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10769 if (rs
== 0 && rt
!= 0) {
10771 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10772 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10774 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10777 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10780 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10781 if (rs
== 0 && rt
!= 0) {
10783 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10784 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10786 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10789 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
10792 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10793 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10795 /* OPC_BOVC, OPC_BNVC */
10796 TCGv t2
= tcg_temp_new();
10797 TCGv t3
= tcg_temp_new();
10798 TCGv t4
= tcg_temp_new();
10799 TCGv input_overflow
= tcg_temp_new();
10801 gen_load_gpr(t0
, rs
);
10802 gen_load_gpr(t1
, rt
);
10803 tcg_gen_ext32s_tl(t2
, t0
);
10804 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
10805 tcg_gen_ext32s_tl(t3
, t1
);
10806 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
10807 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
10809 tcg_gen_add_tl(t4
, t2
, t3
);
10810 tcg_gen_ext32s_tl(t4
, t4
);
10811 tcg_gen_xor_tl(t2
, t2
, t3
);
10812 tcg_gen_xor_tl(t3
, t4
, t3
);
10813 tcg_gen_andc_tl(t2
, t3
, t2
);
10814 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
10815 tcg_gen_or_tl(t4
, t4
, input_overflow
);
10816 if (opc
== OPC_BOVC
) {
10818 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
10821 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
10823 tcg_temp_free(input_overflow
);
10827 } else if (rs
< rt
&& rs
== 0) {
10828 /* OPC_BEQZALC, OPC_BNEZALC */
10829 if (opc
== OPC_BEQZALC
) {
10831 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
10834 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
10837 /* OPC_BEQC, OPC_BNEC */
10838 if (opc
== OPC_BEQC
) {
10840 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
10843 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
10848 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
10851 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
10854 MIPS_INVAL("Compact conditional branch/jump");
10855 generate_exception_end(ctx
, EXCP_RI
);
10859 /* Generating branch here as compact branches don't have delay slot */
10860 gen_goto_tb(ctx
, 1, ctx
->btarget
);
10863 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
/* ISA extensions (ASEs) */

/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
enum {
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_JAL = 0x03,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LBU = 0x14,
    M16_OPC_LHU = 0x15,
    M16_OPC_LWPC = 0x16,
    M16_OPC_LWU = 0x17,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_RRR = 0x1c,
    M16_OPC_EXTEND = 0x1e,
};

/* I8 funct field */

/* RR funct field */

/* I64 funct field */
enum {
    I64_DADDIUPC = 0x6,
};

/* RR ry field for CNVT */
enum {
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,
};
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
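/*
 * Example: MIPS16 3-bit register fields use the mapping above, so a
 * field value of 0 selects $16 (s0), 1 selects $17 (s1), and 2..7
 * select $2..$7 (v0, v1, a0..a3).
 */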
10992 static void gen_mips16_save (DisasContext
*ctx
,
10993 int xsregs
, int aregs
,
10994 int do_ra
, int do_s0
, int do_s1
,
10997 TCGv t0
= tcg_temp_new();
10998 TCGv t1
= tcg_temp_new();
10999 TCGv t2
= tcg_temp_new();
11029 generate_exception_end(ctx
, EXCP_RI
);
11035 gen_base_offset_addr(ctx
, t0
, 29, 12);
11036 gen_load_gpr(t1
, 7);
11037 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11040 gen_base_offset_addr(ctx
, t0
, 29, 8);
11041 gen_load_gpr(t1
, 6);
11042 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11045 gen_base_offset_addr(ctx
, t0
, 29, 4);
11046 gen_load_gpr(t1
, 5);
11047 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11050 gen_base_offset_addr(ctx
, t0
, 29, 0);
11051 gen_load_gpr(t1
, 4);
11052 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11055 gen_load_gpr(t0
, 29);
11057 #define DECR_AND_STORE(reg) do { \
11058 tcg_gen_movi_tl(t2, -4); \
11059 gen_op_addr_add(ctx, t0, t0, t2); \
11060 gen_load_gpr(t1, reg); \
11061 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
11065 DECR_AND_STORE(31);
11070 DECR_AND_STORE(30);
11073 DECR_AND_STORE(23);
11076 DECR_AND_STORE(22);
11079 DECR_AND_STORE(21);
11082 DECR_AND_STORE(20);
11085 DECR_AND_STORE(19);
11088 DECR_AND_STORE(18);
11092 DECR_AND_STORE(17);
11095 DECR_AND_STORE(16);
11125 generate_exception_end(ctx
, EXCP_RI
);
11141 #undef DECR_AND_STORE
11143 tcg_gen_movi_tl(t2
, -framesize
);
11144 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11150 static void gen_mips16_restore (DisasContext
*ctx
,
11151 int xsregs
, int aregs
,
11152 int do_ra
, int do_s0
, int do_s1
,
11156 TCGv t0
= tcg_temp_new();
11157 TCGv t1
= tcg_temp_new();
11158 TCGv t2
= tcg_temp_new();
11160 tcg_gen_movi_tl(t2
, framesize
);
11161 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11163 #define DECR_AND_LOAD(reg) do { \
11164 tcg_gen_movi_tl(t2, -4); \
11165 gen_op_addr_add(ctx, t0, t0, t2); \
11166 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11167 gen_store_gpr(t1, reg); \
11231 generate_exception_end(ctx
, EXCP_RI
);
11247 #undef DECR_AND_LOAD
11249 tcg_gen_movi_tl(t2
, framesize
);
11250 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
{
    TCGv t0;

    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
    if (!is_64_bit) {
        tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
    }

    tcg_temp_free(t0);
}

static void gen_cache_operation(DisasContext *ctx, uint32_t op, int base,
                                int16_t offset)
{
    TCGv_i32 t0 = tcg_const_i32(op);
    TCGv t1 = tcg_temp_new();
    gen_base_offset_addr(ctx, t1, base, offset);
    gen_helper_cache(cpu_env, t1, t0);
}
11286 #if defined(TARGET_MIPS64)
11287 static void decode_i64_mips16 (DisasContext
*ctx
,
11288 int ry
, int funct
, int16_t offset
,
11293 check_insn(ctx
, ISA_MIPS3
);
11294 check_mips_64(ctx
);
11295 offset
= extended
? offset
: offset
<< 3;
11296 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11299 check_insn(ctx
, ISA_MIPS3
);
11300 check_mips_64(ctx
);
11301 offset
= extended
? offset
: offset
<< 3;
11302 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11305 check_insn(ctx
, ISA_MIPS3
);
11306 check_mips_64(ctx
);
11307 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11308 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11311 check_insn(ctx
, ISA_MIPS3
);
11312 check_mips_64(ctx
);
11313 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11314 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11317 check_insn(ctx
, ISA_MIPS3
);
11318 check_mips_64(ctx
);
11319 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11320 generate_exception_end(ctx
, EXCP_RI
);
11322 offset
= extended
? offset
: offset
<< 3;
11323 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11327 check_insn(ctx
, ISA_MIPS3
);
11328 check_mips_64(ctx
);
11329 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11330 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11333 check_insn(ctx
, ISA_MIPS3
);
11334 check_mips_64(ctx
);
11335 offset
= extended
? offset
: offset
<< 2;
11336 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11339 check_insn(ctx
, ISA_MIPS3
);
11340 check_mips_64(ctx
);
11341 offset
= extended
? offset
: offset
<< 2;
11342 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11348 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11350 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11351 int op
, rx
, ry
, funct
, sa
;
11352 int16_t imm
, offset
;
11354 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11355 op
= (ctx
->opcode
>> 11) & 0x1f;
11356 sa
= (ctx
->opcode
>> 22) & 0x1f;
11357 funct
= (ctx
->opcode
>> 8) & 0x7;
11358 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11359 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11360 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11361 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11362 | (ctx
->opcode
& 0x1f));
11364 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11367 case M16_OPC_ADDIUSP
:
11368 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11370 case M16_OPC_ADDIUPC
:
11371 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11374 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11375 /* No delay slot, so just process as a normal instruction */
11378 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11379 /* No delay slot, so just process as a normal instruction */
11381 case M16_OPC_BNEQZ
:
11382 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11383 /* No delay slot, so just process as a normal instruction */
11385 case M16_OPC_SHIFT
:
11386 switch (ctx
->opcode
& 0x3) {
11388 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11391 #if defined(TARGET_MIPS64)
11392 check_mips_64(ctx
);
11393 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11395 generate_exception_end(ctx
, EXCP_RI
);
11399 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11402 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11406 #if defined(TARGET_MIPS64)
11408 check_insn(ctx
, ISA_MIPS3
);
11409 check_mips_64(ctx
);
11410 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11414 imm
= ctx
->opcode
& 0xf;
11415 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11416 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11417 imm
= (int16_t) (imm
<< 1) >> 1;
11418 if ((ctx
->opcode
>> 4) & 0x1) {
11419 #if defined(TARGET_MIPS64)
11420 check_mips_64(ctx
);
11421 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11423 generate_exception_end(ctx
, EXCP_RI
);
11426 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11429 case M16_OPC_ADDIU8
:
11430 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11433 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11435 case M16_OPC_SLTIU
:
11436 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11441 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11444 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11447 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11450 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11453 check_insn(ctx
, ISA_MIPS32
);
11455 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11456 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11457 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11458 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11459 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11460 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11461 | (ctx
->opcode
& 0xf)) << 3;
11463 if (ctx
->opcode
& (1 << 7)) {
11464 gen_mips16_save(ctx
, xsregs
, aregs
,
11465 do_ra
, do_s0
, do_s1
,
11468 gen_mips16_restore(ctx
, xsregs
, aregs
,
11469 do_ra
, do_s0
, do_s1
,
11475 generate_exception_end(ctx
, EXCP_RI
);
11480 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11483 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11485 #if defined(TARGET_MIPS64)
11487 check_insn(ctx
, ISA_MIPS3
);
11488 check_mips_64(ctx
);
11489 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11493 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11496 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11499 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11502 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11505 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11508 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11511 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11513 #if defined(TARGET_MIPS64)
11515 check_insn(ctx
, ISA_MIPS3
);
11516 check_mips_64(ctx
);
11517 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11521 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11524 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11527 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11530 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11532 #if defined(TARGET_MIPS64)
11534 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11538 generate_exception_end(ctx
, EXCP_RI
);
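/*
 * SDBBP with code 1 is the UHI (Unified Hosting Interface) semihosting
 * trap; it is only honoured when semihosting has been enabled, and
 * never in user-only mode.
 */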
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
11554 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11558 int op
, cnvt_op
, op1
, offset
;
11562 op
= (ctx
->opcode
>> 11) & 0x1f;
11563 sa
= (ctx
->opcode
>> 2) & 0x7;
11564 sa
= sa
== 0 ? 8 : sa
;
11565 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11566 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11567 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11568 op1
= offset
= ctx
->opcode
& 0x1f;
11573 case M16_OPC_ADDIUSP
:
11575 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11577 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11580 case M16_OPC_ADDIUPC
:
11581 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11584 offset
= (ctx
->opcode
& 0x7ff) << 1;
11585 offset
= (int16_t)(offset
<< 4) >> 4;
11586 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11587 /* No delay slot, so just process as a normal instruction */
11590 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11591 offset
= (((ctx
->opcode
& 0x1f) << 21)
11592 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11594 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11595 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11599 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11600 ((int8_t)ctx
->opcode
) << 1, 0);
11601 /* No delay slot, so just process as a normal instruction */
11603 case M16_OPC_BNEQZ
:
11604 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11605 ((int8_t)ctx
->opcode
) << 1, 0);
11606 /* No delay slot, so just process as a normal instruction */
11608 case M16_OPC_SHIFT
:
11609 switch (ctx
->opcode
& 0x3) {
11611 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11614 #if defined(TARGET_MIPS64)
11615 check_insn(ctx
, ISA_MIPS3
);
11616 check_mips_64(ctx
);
11617 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11619 generate_exception_end(ctx
, EXCP_RI
);
11623 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11626 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11630 #if defined(TARGET_MIPS64)
11632 check_insn(ctx
, ISA_MIPS3
);
11633 check_mips_64(ctx
);
11634 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11639 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11641 if ((ctx
->opcode
>> 4) & 1) {
11642 #if defined(TARGET_MIPS64)
11643 check_insn(ctx
, ISA_MIPS3
);
11644 check_mips_64(ctx
);
11645 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11647 generate_exception_end(ctx
, EXCP_RI
);
11650 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11654 case M16_OPC_ADDIU8
:
11656 int16_t imm
= (int8_t) ctx
->opcode
;
11658 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11663 int16_t imm
= (uint8_t) ctx
->opcode
;
11664 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11667 case M16_OPC_SLTIU
:
11669 int16_t imm
= (uint8_t) ctx
->opcode
;
11670 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11677 funct
= (ctx
->opcode
>> 8) & 0x7;
11680 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11681 ((int8_t)ctx
->opcode
) << 1, 0);
11684 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11685 ((int8_t)ctx
->opcode
) << 1, 0);
11688 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11691 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11692 ((int8_t)ctx
->opcode
) << 3);
11695 check_insn(ctx
, ISA_MIPS32
);
11697 int do_ra
= ctx
->opcode
& (1 << 6);
11698 int do_s0
= ctx
->opcode
& (1 << 5);
11699 int do_s1
= ctx
->opcode
& (1 << 4);
11700 int framesize
= ctx
->opcode
& 0xf;
11702 if (framesize
== 0) {
11705 framesize
= framesize
<< 3;
11708 if (ctx
->opcode
& (1 << 7)) {
11709 gen_mips16_save(ctx
, 0, 0,
11710 do_ra
, do_s0
, do_s1
, framesize
);
11712 gen_mips16_restore(ctx
, 0, 0,
11713 do_ra
, do_s0
, do_s1
, framesize
);
11719 int rz
= xlat(ctx
->opcode
& 0x7);
11721 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11722 ((ctx
->opcode
>> 5) & 0x7);
11723 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11727 reg32
= ctx
->opcode
& 0x1f;
11728 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11731 generate_exception_end(ctx
, EXCP_RI
);
11738 int16_t imm
= (uint8_t) ctx
->opcode
;
11740 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11745 int16_t imm
= (uint8_t) ctx
->opcode
;
11746 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11749 #if defined(TARGET_MIPS64)
11751 check_insn(ctx
, ISA_MIPS3
);
11752 check_mips_64(ctx
);
11753 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11757 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11760 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11763 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11766 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11769 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11772 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11775 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11777 #if defined (TARGET_MIPS64)
11779 check_insn(ctx
, ISA_MIPS3
);
11780 check_mips_64(ctx
);
11781 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11785 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11788 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11791 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11794 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11798 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11801 switch (ctx
->opcode
& 0x3) {
11803 mips32_op
= OPC_ADDU
;
11806 mips32_op
= OPC_SUBU
;
11808 #if defined(TARGET_MIPS64)
11810 mips32_op
= OPC_DADDU
;
11811 check_insn(ctx
, ISA_MIPS3
);
11812 check_mips_64(ctx
);
11815 mips32_op
= OPC_DSUBU
;
11816 check_insn(ctx
, ISA_MIPS3
);
11817 check_mips_64(ctx
);
11821 generate_exception_end(ctx
, EXCP_RI
);
11825 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11834 int nd
= (ctx
->opcode
>> 7) & 0x1;
11835 int link
= (ctx
->opcode
>> 6) & 0x1;
11836 int ra
= (ctx
->opcode
>> 5) & 0x1;
11839 check_insn(ctx
, ISA_MIPS32
);
11848 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11853 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11854 gen_helper_do_semihosting(cpu_env
);
11856 /* XXX: not clear which exception should be raised
11857 * when in debug mode...
11859 check_insn(ctx
, ISA_MIPS32
);
11860 generate_exception_end(ctx
, EXCP_DBp
);
11864 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11867 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11870 generate_exception_end(ctx
, EXCP_BREAK
);
11873 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11876 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11879 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11881 #if defined (TARGET_MIPS64)
11883 check_insn(ctx
, ISA_MIPS3
);
11884 check_mips_64(ctx
);
11885 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11889 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11892 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11895 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11898 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11901 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
11904 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
11907 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
11910 check_insn(ctx
, ISA_MIPS32
);
11912 case RR_RY_CNVT_ZEB
:
11913 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11915 case RR_RY_CNVT_ZEH
:
11916 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11918 case RR_RY_CNVT_SEB
:
11919 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11921 case RR_RY_CNVT_SEH
:
11922 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11924 #if defined (TARGET_MIPS64)
11925 case RR_RY_CNVT_ZEW
:
11926 check_insn(ctx
, ISA_MIPS64
);
11927 check_mips_64(ctx
);
11928 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11930 case RR_RY_CNVT_SEW
:
11931 check_insn(ctx
, ISA_MIPS64
);
11932 check_mips_64(ctx
);
11933 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11937 generate_exception_end(ctx
, EXCP_RI
);
11942 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
11944 #if defined (TARGET_MIPS64)
11946 check_insn(ctx
, ISA_MIPS3
);
11947 check_mips_64(ctx
);
11948 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
11951 check_insn(ctx
, ISA_MIPS3
);
11952 check_mips_64(ctx
);
11953 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
11956 check_insn(ctx
, ISA_MIPS3
);
11957 check_mips_64(ctx
);
11958 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
11961 check_insn(ctx
, ISA_MIPS3
);
11962 check_mips_64(ctx
);
11963 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
11967 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
11970 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
11973 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
11976 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
11978 #if defined (TARGET_MIPS64)
11980 check_insn(ctx
, ISA_MIPS3
);
11981 check_mips_64(ctx
);
11982 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
11985 check_insn(ctx
, ISA_MIPS3
);
11986 check_mips_64(ctx
);
11987 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
11990 check_insn(ctx
, ISA_MIPS3
);
11991 check_mips_64(ctx
);
11992 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
11995 check_insn(ctx
, ISA_MIPS3
);
11996 check_mips_64(ctx
);
11997 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12001 generate_exception_end(ctx
, EXCP_RI
);
12005 case M16_OPC_EXTEND
:
12006 decode_extended_mips16_opc(env
, ctx
);
12009 #if defined(TARGET_MIPS64)
12011 funct
= (ctx
->opcode
>> 8) & 0x7;
12012 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12016 generate_exception_end(ctx
, EXCP_RI
);
12023 /* microMIPS extension to MIPS32/MIPS64 */
12026 * microMIPS32/microMIPS64 major opcodes
12028 * 1. MIPS Architecture for Programmers Volume II-B:
12029 * The microMIPS32 Instruction Set (Revision 3.05)
12031 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12033 * 2. MIPS Architecture For Programmers Volume II-A:
12034 * The MIPS64 Instruction Set (Revision 3.51)
12064 POOL32S
= 0x16, /* MIPS64 */
12065 DADDIU32
= 0x17, /* MIPS64 */
12094 /* 0x29 is reserved */
12107 /* 0x31 is reserved */
12120 SD32
= 0x36, /* MIPS64 */
12121 LD32
= 0x37, /* MIPS64 */
12123 /* 0x39 is reserved */
12139 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12149 /* POOL32A encoding of minor opcode field */
12152 /* These opcodes are distinguished only by bits 9..6; those bits are
12153 * what are recorded below. */
12190 /* The following can be distinguished by their lower 6 bits. */
12200 /* POOL32AXF encoding of minor opcode field extension */
12203 * 1. MIPS Architecture for Programmers Volume II-B:
12204 * The microMIPS32 Instruction Set (Revision 3.05)
12206 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12208 * 2. MIPS Architecture for Programmers VolumeIV-e:
12209 * The MIPS DSP Application-Specific Extension
12210 * to the microMIPS32 Architecture (Revision 2.34)
12212 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12227 /* begin of microMIPS32 DSP */
12229 /* bits 13..12 for 0x01 */
12235 /* bits 13..12 for 0x2a */
12241 /* bits 13..12 for 0x32 */
12245 /* end of microMIPS32 DSP */
12247 /* bits 15..12 for 0x2c */
12264 /* bits 15..12 for 0x34 */
12272 /* bits 15..12 for 0x3c */
12274 JR
= 0x0, /* alias */
12282 /* bits 15..12 for 0x05 */
12286 /* bits 15..12 for 0x0d */
12298 /* bits 15..12 for 0x15 */
12304 /* bits 15..12 for 0x1d */
12308 /* bits 15..12 for 0x2d */
12313 /* bits 15..12 for 0x35 */
12320 /* POOL32B encoding of minor opcode field (bits 15..12) */
12336 /* POOL32C encoding of minor opcode field (bits 15..12) */
12344 /* 0xa is reserved */
12351 /* 0x6 is reserved */
12357 /* POOL32F encoding of minor opcode field (bits 5..0) */
12360 /* These are the bit 7..6 values */
12369 /* These are the bit 8..6 values */
12394 MOVZ_FMT_05
= 0x05,
12428 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12435 /* POOL32Fxf encoding of minor opcode extension field */
12473 /* POOL32I encoding of minor opcode field (bits 25..21) */
12503 /* These overlap and are distinguished by bit16 of the instruction */
12512 /* POOL16A encoding of minor opcode field */
12519 /* POOL16B encoding of minor opcode field */
12526 /* POOL16C encoding of minor opcode field */
12546 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12566 /* POOL16D encoding of minor opcode field */
12573 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions.  */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))

/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
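/*
 * Example: for the 4-bit field starting at bit 1 used by ADDIUS5,
 * SIMM(op, 1, 4) shifts the field up so its top bit lands at bit 31 and
 * then arithmetically shifts it back down, so an encoded 0xF yields -1
 * while 0x7 yields +7; ZIMM() extracts the same bits without sign
 * extension.
 */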
static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}

static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}

static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}
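/*
 * Example of the ADDIUSP decoding above: encoded values 2..255 give the
 * positive word counts 2..255, 256..509 give -256..-3, and the four
 * leftover codes 0, 1, 510 and 511 extend both ends of the range to
 * 256, 257, -258 and -257.  The result is scaled by 4 to form the byte
 * adjustment applied to the stack pointer ($29).
 */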
static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}

static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
{
    TCGv t0, t1;
    TCGv_i32 t2;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
    switch (opc) {
    case LWM32:
        gen_helper_lwm(cpu_env, t0, t1, t2);
        break;
    case SWM32:
        gen_helper_swm(cpu_env, t0, t1, t2);
        break;
#ifdef TARGET_MIPS64
    case LDM:
        gen_helper_ldm(cpu_env, t0, t1, t2);
        break;
    case SDM:
        gen_helper_sdm(cpu_env, t0, t1, t2);
        break;
#endif
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t2);
}
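/*
 * LWM/SWM (and LDM/SDM on MIPS64) are implemented with helpers because
 * the register list is interpreted at run time; the translator only
 * computes the base address and passes the raw reglist field through.
 */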
12704 static void gen_pool16c_insn(DisasContext
*ctx
)
12706 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12707 int rs
= mmreg(ctx
->opcode
& 0x7);
12709 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12714 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12720 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12726 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12732 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12739 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12740 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12742 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12751 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12752 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12754 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12761 int reg
= ctx
->opcode
& 0x1f;
12763 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12769 int reg
= ctx
->opcode
& 0x1f;
12770 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12771 /* Let normal delay slot handling in our caller take us
12772 to the branch target. */
12777 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12778 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12782 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12783 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12787 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12791 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12794 generate_exception_end(ctx
, EXCP_BREAK
);
12797 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12798 gen_helper_do_semihosting(cpu_env
);
12800 /* XXX: not clear which exception should be raised
12801 * when in debug mode...
12803 check_insn(ctx
, ISA_MIPS32
);
12804 generate_exception_end(ctx
, EXCP_DBp
);
12807 case JRADDIUSP
+ 0:
12808 case JRADDIUSP
+ 1:
12810 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12811 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12812 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12813 /* Let normal delay slot handling in our caller take us
12814 to the branch target. */
12818 generate_exception_end(ctx
, EXCP_RI
);
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
}
12846 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12848 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12849 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12851 switch (ctx
->opcode
& 0xf) {
12853 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12856 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12860 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12861 int offset
= extract32(ctx
->opcode
, 4, 4);
12862 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12865 case R6_JRC16
: /* JRCADDIUSP */
12866 if ((ctx
->opcode
>> 4) & 1) {
12868 int imm
= extract32(ctx
->opcode
, 5, 5);
12869 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12870 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12873 int rs
= extract32(ctx
->opcode
, 5, 5);
12874 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12877 case MOVEP
... MOVEP_07
:
12878 case MOVEP_0C
... MOVEP_0F
:
12880 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12881 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12882 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12883 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12887 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12890 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12894 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12895 int offset
= extract32(ctx
->opcode
, 4, 4);
12896 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12899 case JALRC16
: /* BREAK16, SDBBP16 */
12900 switch (ctx
->opcode
& 0x3f) {
12902 case JALRC16
+ 0x20:
12904 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
12909 generate_exception(ctx
, EXCP_BREAK
);
12913 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
12914 gen_helper_do_semihosting(cpu_env
);
12916 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
12917 generate_exception(ctx
, EXCP_RI
);
12919 generate_exception(ctx
, EXCP_DBp
);
12926 generate_exception(ctx
, EXCP_RI
);
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    if (index != 0) {
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);
    }

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
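/*
 * LWXS loads a word from base + (index << 2); when the index register
 * is $0 the scaled term is skipped and the base address is used as-is.
 */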
static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
                           int base, int16_t offset)
{
    TCGv t0, t1;

    if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    switch (opc) {
    case LWP:
        if (rd == base) {
            generate_exception_end(ctx, EXCP_RI);
            return;
        }
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd+1);
        break;
    case SWP:
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd+1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        break;
#ifdef TARGET_MIPS64
    case LDP:
        if (rd == base) {
            generate_exception_end(ctx, EXCP_RI);
            return;
        }
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd+1);
        break;
    case SDP:
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd+1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        break;
#endif
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
13014 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13016 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13017 int minor
= (ctx
->opcode
>> 12) & 0xf;
13018 uint32_t mips32_op
;
13020 switch (extension
) {
13022 mips32_op
= OPC_TEQ
;
13025 mips32_op
= OPC_TGE
;
13028 mips32_op
= OPC_TGEU
;
13031 mips32_op
= OPC_TLT
;
13034 mips32_op
= OPC_TLTU
;
13037 mips32_op
= OPC_TNE
;
13039 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13041 #ifndef CONFIG_USER_ONLY
13044 check_cp0_enabled(ctx
);
13046 /* Treat as NOP. */
13049 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13053 check_cp0_enabled(ctx
);
13055 TCGv t0
= tcg_temp_new();
13057 gen_load_gpr(t0
, rt
);
13058 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13064 switch (minor
& 3) {
13066 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13069 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13072 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13075 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13078 goto pool32axf_invalid
;
13082 switch (minor
& 3) {
13084 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13087 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13090 goto pool32axf_invalid
;
13096 check_insn(ctx
, ISA_MIPS32R6
);
13097 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13100 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13103 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13106 mips32_op
= OPC_CLO
;
13109 mips32_op
= OPC_CLZ
;
13111 check_insn(ctx
, ISA_MIPS32
);
13112 gen_cl(ctx
, mips32_op
, rt
, rs
);
13115 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13116 gen_rdhwr(ctx
, rt
, rs
, 0);
13119 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13122 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13123 mips32_op
= OPC_MULT
;
13126 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13127 mips32_op
= OPC_MULTU
;
13130 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13131 mips32_op
= OPC_DIV
;
13134 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13135 mips32_op
= OPC_DIVU
;
13138 check_insn(ctx
, ISA_MIPS32
);
13139 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13142 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13143 mips32_op
= OPC_MADD
;
13146 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13147 mips32_op
= OPC_MADDU
;
13150 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13151 mips32_op
= OPC_MSUB
;
13154 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13155 mips32_op
= OPC_MSUBU
;
13157 check_insn(ctx
, ISA_MIPS32
);
13158 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13161 goto pool32axf_invalid
;
13172 generate_exception_err(ctx
, EXCP_CpU
, 2);
13175 goto pool32axf_invalid
;
13180 case JALR
: /* JALRC */
13181 case JALR_HB
: /* JALRC_HB */
13182 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13183 /* JALRC, JALRC_HB */
13184 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13186 /* JALR, JALR_HB */
13187 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13188 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13193 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13194 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13195 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13198 goto pool32axf_invalid
;
13204 check_cp0_enabled(ctx
);
13205 check_insn(ctx
, ISA_MIPS32R2
);
13206 gen_load_srsgpr(rs
, rt
);
13209 check_cp0_enabled(ctx
);
13210 check_insn(ctx
, ISA_MIPS32R2
);
13211 gen_store_srsgpr(rs
, rt
);
13214 goto pool32axf_invalid
;
13217 #ifndef CONFIG_USER_ONLY
13221 mips32_op
= OPC_TLBP
;
13224 mips32_op
= OPC_TLBR
;
13227 mips32_op
= OPC_TLBWI
;
13230 mips32_op
= OPC_TLBWR
;
13233 mips32_op
= OPC_TLBINV
;
13236 mips32_op
= OPC_TLBINVF
;
13239 mips32_op
= OPC_WAIT
;
13242 mips32_op
= OPC_DERET
;
13245 mips32_op
= OPC_ERET
;
13247 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13250 goto pool32axf_invalid
;
13256 check_cp0_enabled(ctx
);
13258 TCGv t0
= tcg_temp_new();
13260 save_cpu_state(ctx
, 1);
13261 gen_helper_di(t0
, cpu_env
);
13262 gen_store_gpr(t0
, rs
);
13263 /* Stop translation as we may have switched the execution mode */
13264 ctx
->bstate
= BS_STOP
;
13269 check_cp0_enabled(ctx
);
13271 TCGv t0
= tcg_temp_new();
13273 save_cpu_state(ctx
, 1);
13274 gen_helper_ei(t0
, cpu_env
);
13275 gen_store_gpr(t0
, rs
);
13276 /* Stop translation as we may have switched the execution mode */
13277 ctx
->bstate
= BS_STOP
;
13282 goto pool32axf_invalid
;
13292 generate_exception_end(ctx
, EXCP_SYSCALL
);
13295 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13296 gen_helper_do_semihosting(cpu_env
);
13298 check_insn(ctx
, ISA_MIPS32
);
13299 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13300 generate_exception_end(ctx
, EXCP_RI
);
13302 generate_exception_end(ctx
, EXCP_DBp
);
13307 goto pool32axf_invalid
;
13311 switch (minor
& 3) {
13313 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13316 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13319 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13322 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13325 goto pool32axf_invalid
;
13329 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13332 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13335 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13338 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13341 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13344 goto pool32axf_invalid
;
13349 MIPS_INVAL("pool32axf");
13350 generate_exception_end(ctx
, EXCP_RI
);
/* Values for microMIPS fmt field.  Variable-width, depending on which
   formats the instruction supports.  */

static void gen_pool32fxf(DisasContext *ctx, int rt, int rs)
{
    int extension = (ctx->opcode >> 6) & 0x3ff;
    uint32_t mips32_op;

#define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
#define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
#define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
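    /*
     * The extension field tested below packs the minor opcode together
     * with its fmt or condition bits: FLOAT_1BIT_FMT places a one-bit
     * fmt above an 8-bit opcode, FLOAT_2BIT_FMT a two-bit fmt above a
     * 7-bit opcode, and COND_FLOAT_MOV a three-bit condition code above
     * a 7-bit opcode.
     */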
13384 switch (extension
) {
13385 case FLOAT_1BIT_FMT(CFC1
, 0):
13386 mips32_op
= OPC_CFC1
;
13388 case FLOAT_1BIT_FMT(CTC1
, 0):
13389 mips32_op
= OPC_CTC1
;
13391 case FLOAT_1BIT_FMT(MFC1
, 0):
13392 mips32_op
= OPC_MFC1
;
13394 case FLOAT_1BIT_FMT(MTC1
, 0):
13395 mips32_op
= OPC_MTC1
;
13397 case FLOAT_1BIT_FMT(MFHC1
, 0):
13398 mips32_op
= OPC_MFHC1
;
13400 case FLOAT_1BIT_FMT(MTHC1
, 0):
13401 mips32_op
= OPC_MTHC1
;
13403 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13406 /* Reciprocal square root */
13407 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13408 mips32_op
= OPC_RSQRT_S
;
13410 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13411 mips32_op
= OPC_RSQRT_D
;
13415 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13416 mips32_op
= OPC_SQRT_S
;
13418 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13419 mips32_op
= OPC_SQRT_D
;
13423 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13424 mips32_op
= OPC_RECIP_S
;
13426 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13427 mips32_op
= OPC_RECIP_D
;
13431 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13432 mips32_op
= OPC_FLOOR_L_S
;
13434 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13435 mips32_op
= OPC_FLOOR_L_D
;
13437 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13438 mips32_op
= OPC_FLOOR_W_S
;
13440 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13441 mips32_op
= OPC_FLOOR_W_D
;
13445 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13446 mips32_op
= OPC_CEIL_L_S
;
13448 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13449 mips32_op
= OPC_CEIL_L_D
;
13451 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13452 mips32_op
= OPC_CEIL_W_S
;
13454 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13455 mips32_op
= OPC_CEIL_W_D
;
13459 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13460 mips32_op
= OPC_TRUNC_L_S
;
13462 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13463 mips32_op
= OPC_TRUNC_L_D
;
13465 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13466 mips32_op
= OPC_TRUNC_W_S
;
13468 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13469 mips32_op
= OPC_TRUNC_W_D
;
13473 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13474 mips32_op
= OPC_ROUND_L_S
;
13476 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13477 mips32_op
= OPC_ROUND_L_D
;
13479 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13480 mips32_op
= OPC_ROUND_W_S
;
13482 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13483 mips32_op
= OPC_ROUND_W_D
;
13486 /* Integer to floating-point conversion */
13487 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13488 mips32_op
= OPC_CVT_L_S
;
13490 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13491 mips32_op
= OPC_CVT_L_D
;
13493 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13494 mips32_op
= OPC_CVT_W_S
;
13496 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13497 mips32_op
= OPC_CVT_W_D
;
13500 /* Paired-foo conversions */
13501 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13502 mips32_op
= OPC_CVT_S_PL
;
13504 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13505 mips32_op
= OPC_CVT_S_PU
;
13507 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13508 mips32_op
= OPC_CVT_PW_PS
;
13510 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13511 mips32_op
= OPC_CVT_PS_PW
;
13514 /* Floating-point moves */
13515 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13516 mips32_op
= OPC_MOV_S
;
13518 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13519 mips32_op
= OPC_MOV_D
;
13521 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13522 mips32_op
= OPC_MOV_PS
;
13525 /* Absolute value */
13526 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13527 mips32_op
= OPC_ABS_S
;
13529 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13530 mips32_op
= OPC_ABS_D
;
13532 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13533 mips32_op
= OPC_ABS_PS
;
13537 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13538 mips32_op
= OPC_NEG_S
;
13540 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13541 mips32_op
= OPC_NEG_D
;
13543 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13544 mips32_op
= OPC_NEG_PS
;
13547 /* Reciprocal square root step */
13548 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13549 mips32_op
= OPC_RSQRT1_S
;
13551 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13552 mips32_op
= OPC_RSQRT1_D
;
13554 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13555 mips32_op
= OPC_RSQRT1_PS
;
13558 /* Reciprocal step */
13559 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13560 mips32_op
= OPC_RECIP1_S
;
13562 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13563 mips32_op
= OPC_RECIP1_S
;
13565 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13566 mips32_op
= OPC_RECIP1_PS
;
13569 /* Conversions from double */
13570 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13571 mips32_op
= OPC_CVT_D_S
;
13573 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13574 mips32_op
= OPC_CVT_D_W
;
13576 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13577 mips32_op
= OPC_CVT_D_L
;
13580 /* Conversions from single */
13581 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13582 mips32_op
= OPC_CVT_S_D
;
13584 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13585 mips32_op
= OPC_CVT_S_W
;
13587 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13588 mips32_op
= OPC_CVT_S_L
;
13590 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
    /* Conditional moves on floating-point codes */
    case COND_FLOAT_MOV(MOVT, 0):
    case COND_FLOAT_MOV(MOVT, 1):
    case COND_FLOAT_MOV(MOVT, 2):
    case COND_FLOAT_MOV(MOVT, 3):
    case COND_FLOAT_MOV(MOVT, 4):
    case COND_FLOAT_MOV(MOVT, 5):
    case COND_FLOAT_MOV(MOVT, 6):
    case COND_FLOAT_MOV(MOVT, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
        break;
    case COND_FLOAT_MOV(MOVF, 0):
    case COND_FLOAT_MOV(MOVF, 1):
    case COND_FLOAT_MOV(MOVF, 2):
    case COND_FLOAT_MOV(MOVF, 3):
    case COND_FLOAT_MOV(MOVF, 4):
    case COND_FLOAT_MOV(MOVF, 5):
    case COND_FLOAT_MOV(MOVF, 6):
    case COND_FLOAT_MOV(MOVF, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);
        break;
    default:
        MIPS_INVAL("pool32fxf");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
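/*
 * Decode the second halfword of a 32-bit microMIPS instruction.  The first
 * halfword is already in ctx->opcode; the trailing halfword is fetched here,
 * the register and immediate fields are extracted, and control dispatches on
 * the major opcode pools (POOL32A/B/C/F/I, loads/stores, branches).
 */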
static void decode_micromips32_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int32_t offset;
    uint16_t insn;
    int rt, rs, rd, rr;
    int16_t imm;
    uint32_t op, minor, mips32_op;
    uint32_t cond, fmt, cc;

    insn = cpu_lduw_code(env, ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;
    switch (op) {
    case POOL32A:
        minor = ctx->opcode & 0x3f;
13647 minor
= (ctx
->opcode
>> 6) & 0xf;
13650 mips32_op
= OPC_SLL
;
13653 mips32_op
= OPC_SRA
;
13656 mips32_op
= OPC_SRL
;
13659 mips32_op
= OPC_ROTR
;
13661 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13664 check_insn(ctx
, ISA_MIPS32R6
);
13665 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13668 check_insn(ctx
, ISA_MIPS32R6
);
13669 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13672 check_insn(ctx
, ISA_MIPS32R6
);
13673 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13676 goto pool32a_invalid
;
13680 minor
= (ctx
->opcode
>> 6) & 0xf;
13684 mips32_op
= OPC_ADD
;
13687 mips32_op
= OPC_ADDU
;
13690 mips32_op
= OPC_SUB
;
13693 mips32_op
= OPC_SUBU
;
13696 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13697 mips32_op
= OPC_MUL
;
13699 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13703 mips32_op
= OPC_SLLV
;
13706 mips32_op
= OPC_SRLV
;
13709 mips32_op
= OPC_SRAV
;
13712 mips32_op
= OPC_ROTRV
;
13714 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13716 /* Logical operations */
13718 mips32_op
= OPC_AND
;
13721 mips32_op
= OPC_OR
;
13724 mips32_op
= OPC_NOR
;
13727 mips32_op
= OPC_XOR
;
13729 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13731 /* Set less than */
13733 mips32_op
= OPC_SLT
;
13736 mips32_op
= OPC_SLTU
;
13738 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13741 goto pool32a_invalid
;
13745 minor
= (ctx
->opcode
>> 6) & 0xf;
13747 /* Conditional moves */
13748 case MOVN
: /* MUL */
13749 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13751 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13754 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13757 case MOVZ
: /* MUH */
13758 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13760 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13763 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13767 check_insn(ctx
, ISA_MIPS32R6
);
13768 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13771 check_insn(ctx
, ISA_MIPS32R6
);
13772 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13774 case LWXS
: /* DIV */
13775 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13777 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13780 gen_ldxs(ctx
, rs
, rt
, rd
);
13784 check_insn(ctx
, ISA_MIPS32R6
);
13785 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13788 check_insn(ctx
, ISA_MIPS32R6
);
13789 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13792 check_insn(ctx
, ISA_MIPS32R6
);
13793 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13796 goto pool32a_invalid
;
13800 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13803 check_insn(ctx
, ISA_MIPS32R6
);
13804 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13805 extract32(ctx
->opcode
, 9, 2));
13808 check_insn(ctx
, ISA_MIPS32R6
);
13809 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13810 extract32(ctx
->opcode
, 9, 2));
13813 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
13816 gen_pool32axf(env
, ctx
, rt
, rs
);
13819 generate_exception_end(ctx
, EXCP_BREAK
);
13822 check_insn(ctx
, ISA_MIPS32R6
);
13823 generate_exception_end(ctx
, EXCP_RI
);
13827 MIPS_INVAL("pool32a");
13828 generate_exception_end(ctx
, EXCP_RI
);
13833 minor
= (ctx
->opcode
>> 12) & 0xf;
13836 check_cp0_enabled(ctx
);
13837 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
13838 gen_cache_operation(ctx
, rt
, rs
, imm
);
13843 /* COP2: Not implemented. */
13844 generate_exception_err(ctx
, EXCP_CpU
, 2);
13846 #ifdef TARGET_MIPS64
13849 check_insn(ctx
, ISA_MIPS3
);
13850 check_mips_64(ctx
);
13855 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13857 #ifdef TARGET_MIPS64
13860 check_insn(ctx
, ISA_MIPS3
);
13861 check_mips_64(ctx
);
13866 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13869 MIPS_INVAL("pool32b");
13870 generate_exception_end(ctx
, EXCP_RI
);
13875 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
13876 minor
= ctx
->opcode
& 0x3f;
13877 check_cp1_enabled(ctx
);
13880 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13881 mips32_op
= OPC_ALNV_PS
;
13884 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13885 mips32_op
= OPC_MADD_S
;
13888 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13889 mips32_op
= OPC_MADD_D
;
13892 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13893 mips32_op
= OPC_MADD_PS
;
13896 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13897 mips32_op
= OPC_MSUB_S
;
13900 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13901 mips32_op
= OPC_MSUB_D
;
13904 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13905 mips32_op
= OPC_MSUB_PS
;
13908 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13909 mips32_op
= OPC_NMADD_S
;
13912 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13913 mips32_op
= OPC_NMADD_D
;
13916 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13917 mips32_op
= OPC_NMADD_PS
;
13920 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13921 mips32_op
= OPC_NMSUB_S
;
13924 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13925 mips32_op
= OPC_NMSUB_D
;
13928 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13929 mips32_op
= OPC_NMSUB_PS
;
13931 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
13933 case CABS_COND_FMT
:
13934 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13935 cond
= (ctx
->opcode
>> 6) & 0xf;
13936 cc
= (ctx
->opcode
>> 13) & 0x7;
13937 fmt
= (ctx
->opcode
>> 10) & 0x3;
13940 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
13943 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
13946 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
13949 goto pool32f_invalid
;
13953 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13954 cond
= (ctx
->opcode
>> 6) & 0xf;
13955 cc
= (ctx
->opcode
>> 13) & 0x7;
13956 fmt
= (ctx
->opcode
>> 10) & 0x3;
13959 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
13962 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
13965 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
13968 goto pool32f_invalid
;
13972 check_insn(ctx
, ISA_MIPS32R6
);
13973 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13976 check_insn(ctx
, ISA_MIPS32R6
);
13977 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13980 gen_pool32fxf(ctx
, rt
, rs
);
13984 switch ((ctx
->opcode
>> 6) & 0x7) {
13986 mips32_op
= OPC_PLL_PS
;
13989 mips32_op
= OPC_PLU_PS
;
13992 mips32_op
= OPC_PUL_PS
;
13995 mips32_op
= OPC_PUU_PS
;
13998 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13999 mips32_op
= OPC_CVT_PS_S
;
14001 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14004 goto pool32f_invalid
;
14008 check_insn(ctx
, ISA_MIPS32R6
);
14009 switch ((ctx
->opcode
>> 9) & 0x3) {
14011 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14014 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14017 goto pool32f_invalid
;
14022 switch ((ctx
->opcode
>> 6) & 0x7) {
14024 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14025 mips32_op
= OPC_LWXC1
;
14028 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14029 mips32_op
= OPC_SWXC1
;
14032 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14033 mips32_op
= OPC_LDXC1
;
14036 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14037 mips32_op
= OPC_SDXC1
;
14040 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14041 mips32_op
= OPC_LUXC1
;
14044 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14045 mips32_op
= OPC_SUXC1
;
14047 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14050 goto pool32f_invalid
;
14054 check_insn(ctx
, ISA_MIPS32R6
);
14055 switch ((ctx
->opcode
>> 9) & 0x3) {
14057 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14060 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14063 goto pool32f_invalid
;
14068 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14069 fmt
= (ctx
->opcode
>> 9) & 0x3;
14070 switch ((ctx
->opcode
>> 6) & 0x7) {
14074 mips32_op
= OPC_RSQRT2_S
;
14077 mips32_op
= OPC_RSQRT2_D
;
14080 mips32_op
= OPC_RSQRT2_PS
;
14083 goto pool32f_invalid
;
14089 mips32_op
= OPC_RECIP2_S
;
14092 mips32_op
= OPC_RECIP2_D
;
14095 mips32_op
= OPC_RECIP2_PS
;
14098 goto pool32f_invalid
;
14102 mips32_op
= OPC_ADDR_PS
;
14105 mips32_op
= OPC_MULR_PS
;
14107 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14110 goto pool32f_invalid
;
14114 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14115 cc
= (ctx
->opcode
>> 13) & 0x7;
14116 fmt
= (ctx
->opcode
>> 9) & 0x3;
14117 switch ((ctx
->opcode
>> 6) & 0x7) {
14118 case MOVF_FMT
: /* RINT_FMT */
14119 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14123 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14126 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14129 goto pool32f_invalid
;
14135 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14138 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14142 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14145 goto pool32f_invalid
;
14149 case MOVT_FMT
: /* CLASS_FMT */
14150 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14154 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14157 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14160 goto pool32f_invalid
;
14166 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14169 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14173 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14176 goto pool32f_invalid
;
14181 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14184 goto pool32f_invalid
;
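/* Select the S, D or PS flavour of a three-operand FP op from bits 9:8 of the opcode. */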
#define FINSN_3ARG_SDPS(prfx)                           \
    switch ((ctx->opcode >> 8) & 0x3) {                 \
    case FMT_SDPS_S:                                    \
        mips32_op = OPC_##prfx##_S;                     \
        break;                                          \
    case FMT_SDPS_D:                                    \
        mips32_op = OPC_##prfx##_D;                     \
        break;                                          \
    case FMT_SDPS_PS:                                   \
        mips32_op = OPC_##prfx##_PS;                    \
        break;                                          \
    default:                                            \
        goto pool32f_invalid;                           \
    }
14203 check_insn(ctx
, ISA_MIPS32R6
);
14204 switch ((ctx
->opcode
>> 9) & 0x3) {
14206 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14209 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14212 goto pool32f_invalid
;
14216 check_insn(ctx
, ISA_MIPS32R6
);
14217 switch ((ctx
->opcode
>> 9) & 0x3) {
14219 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14222 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14225 goto pool32f_invalid
;
14229 /* regular FP ops */
14230 switch ((ctx
->opcode
>> 6) & 0x3) {
14232 FINSN_3ARG_SDPS(ADD
);
14235 FINSN_3ARG_SDPS(SUB
);
14238 FINSN_3ARG_SDPS(MUL
);
14241 fmt
= (ctx
->opcode
>> 8) & 0x3;
14243 mips32_op
= OPC_DIV_D
;
14244 } else if (fmt
== 0) {
14245 mips32_op
= OPC_DIV_S
;
14247 goto pool32f_invalid
;
14251 goto pool32f_invalid
;
14256 switch ((ctx
->opcode
>> 6) & 0x7) {
14257 case MOVN_FMT
: /* SELNEZ_FMT */
14258 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14260 switch ((ctx
->opcode
>> 9) & 0x3) {
14262 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14265 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14268 goto pool32f_invalid
;
14272 FINSN_3ARG_SDPS(MOVN
);
14276 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14277 FINSN_3ARG_SDPS(MOVN
);
14279 case MOVZ_FMT
: /* SELEQZ_FMT */
14280 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14282 switch ((ctx
->opcode
>> 9) & 0x3) {
14284 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14287 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14290 goto pool32f_invalid
;
14294 FINSN_3ARG_SDPS(MOVZ
);
14298 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14299 FINSN_3ARG_SDPS(MOVZ
);
14302 check_insn(ctx
, ISA_MIPS32R6
);
14303 switch ((ctx
->opcode
>> 9) & 0x3) {
14305 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14308 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14311 goto pool32f_invalid
;
14315 check_insn(ctx
, ISA_MIPS32R6
);
14316 switch ((ctx
->opcode
>> 9) & 0x3) {
14318 mips32_op
= OPC_MADDF_S
;
14321 mips32_op
= OPC_MADDF_D
;
14324 goto pool32f_invalid
;
14328 check_insn(ctx
, ISA_MIPS32R6
);
14329 switch ((ctx
->opcode
>> 9) & 0x3) {
14331 mips32_op
= OPC_MSUBF_S
;
14334 mips32_op
= OPC_MSUBF_D
;
14337 goto pool32f_invalid
;
14341 goto pool32f_invalid
;
14345 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14349 MIPS_INVAL("pool32f");
14350 generate_exception_end(ctx
, EXCP_RI
);
14354 generate_exception_err(ctx
, EXCP_CpU
, 1);
14358 minor
= (ctx
->opcode
>> 21) & 0x1f;
14361 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14362 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14365 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14366 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14367 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14370 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14371 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14372 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14375 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14376 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14379 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14380 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14381 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14384 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14385 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14386 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14389 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14390 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14393 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14394 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14398 case TLTI
: /* BC1EQZC */
14399 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14401 check_cp1_enabled(ctx
);
14402 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14405 mips32_op
= OPC_TLTI
;
14409 case TGEI
: /* BC1NEZC */
14410 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14412 check_cp1_enabled(ctx
);
14413 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14416 mips32_op
= OPC_TGEI
;
14421 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14422 mips32_op
= OPC_TLTIU
;
14425 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14426 mips32_op
= OPC_TGEIU
;
14428 case TNEI
: /* SYNCI */
14429 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14431 /* Break the TB to be able to sync copied instructions
14433 ctx
->bstate
= BS_STOP
;
14436 mips32_op
= OPC_TNEI
;
14441 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14442 mips32_op
= OPC_TEQI
;
14444 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14449 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14450 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14451 4, rs
, 0, imm
<< 1, 0);
14452 /* Compact branches don't have a delay slot, so just let
14453 the normal delay slot handling take us to the branch
14457 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14458 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14461 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14462 /* Break the TB to be able to sync copied instructions
14464 ctx
->bstate
= BS_STOP
;
14468 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14469 /* COP2: Not implemented. */
14470 generate_exception_err(ctx
, EXCP_CpU
, 2);
14473 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14474 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14477 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14478 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14481 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14482 mips32_op
= OPC_BC1FANY4
;
14485 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14486 mips32_op
= OPC_BC1TANY4
;
14489 check_insn(ctx
, ASE_MIPS3D
);
14492 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14493 check_cp1_enabled(ctx
);
14494 gen_compute_branch1(ctx
, mips32_op
,
14495 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14497 generate_exception_err(ctx
, EXCP_CpU
, 1);
14502 /* MIPS DSP: not implemented */
14505 MIPS_INVAL("pool32i");
14506 generate_exception_end(ctx
, EXCP_RI
);
14511 minor
= (ctx
->opcode
>> 12) & 0xf;
14512 offset
= sextract32(ctx
->opcode
, 0,
14513 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14516 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14517 mips32_op
= OPC_LWL
;
14520 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14521 mips32_op
= OPC_SWL
;
14524 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14525 mips32_op
= OPC_LWR
;
14528 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14529 mips32_op
= OPC_SWR
;
14531 #if defined(TARGET_MIPS64)
14533 check_insn(ctx
, ISA_MIPS3
);
14534 check_mips_64(ctx
);
14535 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14536 mips32_op
= OPC_LDL
;
14539 check_insn(ctx
, ISA_MIPS3
);
14540 check_mips_64(ctx
);
14541 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14542 mips32_op
= OPC_SDL
;
14545 check_insn(ctx
, ISA_MIPS3
);
14546 check_mips_64(ctx
);
14547 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14548 mips32_op
= OPC_LDR
;
14551 check_insn(ctx
, ISA_MIPS3
);
14552 check_mips_64(ctx
);
14553 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14554 mips32_op
= OPC_SDR
;
14557 check_insn(ctx
, ISA_MIPS3
);
14558 check_mips_64(ctx
);
14559 mips32_op
= OPC_LWU
;
14562 check_insn(ctx
, ISA_MIPS3
);
14563 check_mips_64(ctx
);
14564 mips32_op
= OPC_LLD
;
14568 mips32_op
= OPC_LL
;
14571 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14574 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14577 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14579 #if defined(TARGET_MIPS64)
14581 check_insn(ctx
, ISA_MIPS3
);
14582 check_mips_64(ctx
);
14583 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14587 /* Treat as no-op */
14588 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14589 /* hint codes 24-31 are reserved and signal RI */
14590 generate_exception(ctx
, EXCP_RI
);
14594 MIPS_INVAL("pool32c");
14595 generate_exception_end(ctx
, EXCP_RI
);
14599 case ADDI32
: /* AUI, LUI */
14600 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14602 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14605 mips32_op
= OPC_ADDI
;
14610 mips32_op
= OPC_ADDIU
;
14612 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14615 /* Logical operations */
14617 mips32_op
= OPC_ORI
;
14620 mips32_op
= OPC_XORI
;
14623 mips32_op
= OPC_ANDI
;
14625 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14628 /* Set less than immediate */
14630 mips32_op
= OPC_SLTI
;
14633 mips32_op
= OPC_SLTIU
;
14635 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14638 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14639 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14640 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14641 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14643 case JALS32
: /* BOVC, BEQC, BEQZALC */
14644 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14647 mips32_op
= OPC_BOVC
;
14648 } else if (rs
< rt
&& rs
== 0) {
14650 mips32_op
= OPC_BEQZALC
;
14653 mips32_op
= OPC_BEQC
;
14655 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14658 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14659 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14660 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14663 case BEQ32
: /* BC */
14664 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14666 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14667 sextract32(ctx
->opcode
<< 1, 0, 27));
14670 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14673 case BNE32
: /* BALC */
14674 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14676 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14677 sextract32(ctx
->opcode
<< 1, 0, 27));
14680 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14683 case J32
: /* BGTZC, BLTZC, BLTC */
14684 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14685 if (rs
== 0 && rt
!= 0) {
14687 mips32_op
= OPC_BGTZC
;
14688 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14690 mips32_op
= OPC_BLTZC
;
14693 mips32_op
= OPC_BLTC
;
14695 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14698 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14699 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14702 case JAL32
: /* BLEZC, BGEZC, BGEC */
14703 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14704 if (rs
== 0 && rt
!= 0) {
14706 mips32_op
= OPC_BLEZC
;
14707 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14709 mips32_op
= OPC_BGEZC
;
14712 mips32_op
= OPC_BGEC
;
14714 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14717 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14718 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14719 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14722 /* Floating point (COP1) */
14724 mips32_op
= OPC_LWC1
;
14727 mips32_op
= OPC_LDC1
;
14730 mips32_op
= OPC_SWC1
;
14733 mips32_op
= OPC_SDC1
;
14735 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14737 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14738 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14739 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14740 switch ((ctx
->opcode
>> 16) & 0x1f) {
14741 case ADDIUPC_00
... ADDIUPC_07
:
14742 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14745 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14748 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14750 case LWPC_08
... LWPC_0F
:
14751 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14754 generate_exception(ctx
, EXCP_RI
);
14759 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14760 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14762 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14765 case BNVC
: /* BNEC, BNEZALC */
14766 check_insn(ctx
, ISA_MIPS32R6
);
14769 mips32_op
= OPC_BNVC
;
14770 } else if (rs
< rt
&& rs
== 0) {
14772 mips32_op
= OPC_BNEZALC
;
14775 mips32_op
= OPC_BNEC
;
14777 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14779 case R6_BNEZC
: /* JIALC */
14780 check_insn(ctx
, ISA_MIPS32R6
);
14783 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14784 sextract32(ctx
->opcode
<< 1, 0, 22));
14787 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14790 case R6_BEQZC
: /* JIC */
14791 check_insn(ctx
, ISA_MIPS32R6
);
14794 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14795 sextract32(ctx
->opcode
<< 1, 0, 22));
14798 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14801 case BLEZALC
: /* BGEZALC, BGEUC */
14802 check_insn(ctx
, ISA_MIPS32R6
);
14803 if (rs
== 0 && rt
!= 0) {
14805 mips32_op
= OPC_BLEZALC
;
14806 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14808 mips32_op
= OPC_BGEZALC
;
14811 mips32_op
= OPC_BGEUC
;
14813 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14815 case BGTZALC
: /* BLTZALC, BLTUC */
14816 check_insn(ctx
, ISA_MIPS32R6
);
14817 if (rs
== 0 && rt
!= 0) {
14819 mips32_op
= OPC_BGTZALC
;
14820 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14822 mips32_op
= OPC_BLTZALC
;
14825 mips32_op
= OPC_BLTUC
;
14827 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14829 /* Loads and stores */
14831 mips32_op
= OPC_LB
;
14834 mips32_op
= OPC_LBU
;
14837 mips32_op
= OPC_LH
;
14840 mips32_op
= OPC_LHU
;
14843 mips32_op
= OPC_LW
;
14845 #ifdef TARGET_MIPS64
14847 check_insn(ctx
, ISA_MIPS3
);
14848 check_mips_64(ctx
);
14849 mips32_op
= OPC_LD
;
14852 check_insn(ctx
, ISA_MIPS3
);
14853 check_mips_64(ctx
);
14854 mips32_op
= OPC_SD
;
14858 mips32_op
= OPC_SB
;
14861 mips32_op
= OPC_SH
;
14864 mips32_op
= OPC_SW
;
14867 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14870 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14873 generate_exception_end(ctx
, EXCP_RI
);
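/*
 * Top-level microMIPS decoder: check halfword alignment, enforce the
 * delay-slot size restrictions, handle the 16-bit encodings directly and
 * hand 32-bit encodings to decode_micromips32_opc().
 */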
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t op;

    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);
        return 2;
    }

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
        case 0:
        /* POOL32A, POOL32B, POOL32I, POOL32C */
        case 4:
        /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
        case 5:
        /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
        case 6:
        /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
        case 7:
        /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            break;
        case 1:
        /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
        case 2:
        /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
        case 3:
        /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            break;
        }
    }

    switch (op) {
    case POOL16A:
        {
            int rd = mmreg(uMIPS_RD(ctx->opcode));
14926 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
14927 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
14930 switch (ctx
->opcode
& 0x1) {
14938 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14939 /* In the Release 6 the register number location in
14940 * the instruction encoding has changed.
14942 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
14944 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
14950 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14951 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14952 int amount
= (ctx
->opcode
>> 1) & 0x7;
14954 amount
= amount
== 0 ? 8 : amount
;
14956 switch (ctx
->opcode
& 0x1) {
14965 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
14969 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14970 gen_pool16c_r6_insn(ctx
);
14972 gen_pool16c_insn(ctx
);
14977 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14978 int rb
= 28; /* GP */
14979 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
14981 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14985 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14986 if (ctx
->opcode
& 1) {
14987 generate_exception_end(ctx
, EXCP_RI
);
14990 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14991 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14992 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
14993 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14998 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14999 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15000 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15001 offset
= (offset
== 0xf ? -1 : offset
);
15003 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15008 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15009 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15010 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15012 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15017 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15018 int rb
= 29; /* SP */
15019 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15021 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15026 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15027 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15028 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15030 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15035 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15036 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15037 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15039 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15044 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15045 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15046 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15048 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15053 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15054 int rb
= 29; /* SP */
15055 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15057 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15062 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15063 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15064 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15066 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15071 int rd
= uMIPS_RD5(ctx
->opcode
);
15072 int rs
= uMIPS_RS5(ctx
->opcode
);
15074 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15081 switch (ctx
->opcode
& 0x1) {
15091 switch (ctx
->opcode
& 0x1) {
15096 gen_addiur1sp(ctx
);
15100 case B16
: /* BC16 */
15101 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15102 sextract32(ctx
->opcode
, 0, 10) << 1,
15103 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15105 case BNEZ16
: /* BNEZC16 */
15106 case BEQZ16
: /* BEQZC16 */
15107 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15108 mmreg(uMIPS_RD(ctx
->opcode
)),
15109 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15110 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15115 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15116 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15118 imm
= (imm
== 0x7f ? -1 : imm
);
15119 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15125 generate_exception_end(ctx
, EXCP_RI
);
15128 decode_micromips32_opc(env
, ctx
);
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */

#endif

/* MIPSDSP functions. */
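/*
 * Indexed DSP loads: the effective address is GPR[base] + GPR[offset], with
 * the addend taken as zero when the corresponding register number is 0; the
 * loaded value is written to GPR[rd].
 */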
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
{
    TCGv t0;

    check_dsp(ctx);
    t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
    }

    switch (opc) {
    case OPC_LBUX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        break;
    case OPC_LHX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        break;
    case OPC_LWX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
        break;
#if defined(TARGET_MIPS64)
    case OPC_LDX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
        break;
#endif
    }
    tcg_temp_free(t0);
}
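/*
 * Two-operand DSP ASE arithmetic (the ADDQ, SUBQ, ADDU, SUBU, ABSQ and
 * PRECEQ groups): the result goes to GPR[ret], and a destination of $zero
 * is treated as a NOP.
 */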
static void gen_mipsdsp_arith(DisasContext *ctx, uint32_t op1, uint32_t op2,
                              int ret, int v1, int v2)
{
    TCGv v1_t;
    TCGv v2_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    switch (op1) {
    /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
    case OPC_MULT_G_2E:
15206 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15208 case OPC_ADDUH_R_QB
:
15209 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15212 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15214 case OPC_ADDQH_R_PH
:
15215 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15218 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15220 case OPC_ADDQH_R_W
:
15221 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15224 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15226 case OPC_SUBUH_R_QB
:
15227 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15230 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15232 case OPC_SUBQH_R_PH
:
15233 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15236 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15238 case OPC_SUBQH_R_W
:
15239 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15243 case OPC_ABSQ_S_PH_DSP
:
15245 case OPC_ABSQ_S_QB
:
15247 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15249 case OPC_ABSQ_S_PH
:
15251 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15255 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15257 case OPC_PRECEQ_W_PHL
:
15259 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15260 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15262 case OPC_PRECEQ_W_PHR
:
15264 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15265 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15266 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15268 case OPC_PRECEQU_PH_QBL
:
15270 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15272 case OPC_PRECEQU_PH_QBR
:
15274 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15276 case OPC_PRECEQU_PH_QBLA
:
15278 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15280 case OPC_PRECEQU_PH_QBRA
:
15282 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15284 case OPC_PRECEU_PH_QBL
:
15286 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15288 case OPC_PRECEU_PH_QBR
:
15290 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15292 case OPC_PRECEU_PH_QBLA
:
15294 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15296 case OPC_PRECEU_PH_QBRA
:
15298 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15302 case OPC_ADDU_QB_DSP
:
15306 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15308 case OPC_ADDQ_S_PH
:
15310 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15314 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15318 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15320 case OPC_ADDU_S_QB
:
15322 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15326 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15328 case OPC_ADDU_S_PH
:
15330 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15334 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15336 case OPC_SUBQ_S_PH
:
15338 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15342 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15346 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15348 case OPC_SUBU_S_QB
:
15350 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15354 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15356 case OPC_SUBU_S_PH
:
15358 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15362 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15366 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15370 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15372 case OPC_RADDU_W_QB
:
15374 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15378 case OPC_CMPU_EQ_QB_DSP
:
15380 case OPC_PRECR_QB_PH
:
15382 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15384 case OPC_PRECRQ_QB_PH
:
15386 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15388 case OPC_PRECR_SRA_PH_W
:
15391 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15392 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15394 tcg_temp_free_i32(sa_t
);
15397 case OPC_PRECR_SRA_R_PH_W
:
15400 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15401 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15403 tcg_temp_free_i32(sa_t
);
15406 case OPC_PRECRQ_PH_W
:
15408 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15410 case OPC_PRECRQ_RS_PH_W
:
15412 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15414 case OPC_PRECRQU_S_QB_PH
:
15416 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15420 #ifdef TARGET_MIPS64
15421 case OPC_ABSQ_S_QH_DSP
:
15423 case OPC_PRECEQ_L_PWL
:
15425 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15427 case OPC_PRECEQ_L_PWR
:
15429 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15431 case OPC_PRECEQ_PW_QHL
:
15433 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15435 case OPC_PRECEQ_PW_QHR
:
15437 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15439 case OPC_PRECEQ_PW_QHLA
:
15441 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15443 case OPC_PRECEQ_PW_QHRA
:
15445 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15447 case OPC_PRECEQU_QH_OBL
:
15449 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15451 case OPC_PRECEQU_QH_OBR
:
15453 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15455 case OPC_PRECEQU_QH_OBLA
:
15457 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15459 case OPC_PRECEQU_QH_OBRA
:
15461 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15463 case OPC_PRECEU_QH_OBL
:
15465 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15467 case OPC_PRECEU_QH_OBR
:
15469 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15471 case OPC_PRECEU_QH_OBLA
:
15473 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15475 case OPC_PRECEU_QH_OBRA
:
15477 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15479 case OPC_ABSQ_S_OB
:
15481 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15483 case OPC_ABSQ_S_PW
:
15485 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15487 case OPC_ABSQ_S_QH
:
15489 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15493 case OPC_ADDU_OB_DSP
:
15495 case OPC_RADDU_L_OB
:
15497 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15501 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15503 case OPC_SUBQ_S_PW
:
15505 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15509 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15511 case OPC_SUBQ_S_QH
:
15513 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15517 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15519 case OPC_SUBU_S_OB
:
15521 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15525 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15527 case OPC_SUBU_S_QH
:
15529 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15533 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15535 case OPC_SUBUH_R_OB
:
15537 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15541 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15543 case OPC_ADDQ_S_PW
:
15545 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15549 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15551 case OPC_ADDQ_S_QH
:
15553 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15557 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15559 case OPC_ADDU_S_OB
:
15561 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15565 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15567 case OPC_ADDU_S_QH
:
15569 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15573 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15575 case OPC_ADDUH_R_OB
:
15577 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15581 case OPC_CMPU_EQ_OB_DSP
:
15583 case OPC_PRECR_OB_QH
:
15585 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15587 case OPC_PRECR_SRA_QH_PW
:
15590 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15591 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15592 tcg_temp_free_i32(ret_t
);
15595 case OPC_PRECR_SRA_R_QH_PW
:
15598 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15599 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15600 tcg_temp_free_i32(sa_v
);
15603 case OPC_PRECRQ_OB_QH
:
15605 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15607 case OPC_PRECRQ_PW_L
:
15609 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15611 case OPC_PRECRQ_QH_PW
:
15613 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15615 case OPC_PRECRQ_RS_QH_PW
:
15617 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15619 case OPC_PRECRQU_S_OB_QH
:
15621 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15628 tcg_temp_free(v1_t
);
15629 tcg_temp_free(v2_t
);
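/*
 * DSP ASE shifts (the SHLL, SHRL and SHRA families): depending on the
 * opcode, the shift amount is either the immediate v1 (moved into t0) or
 * the value of register v1.
 */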
static void gen_mipsdsp_shift(DisasContext *ctx, uint32_t opc,
                              int ret, int v1, int v2)
{
    uint32_t op2;
    TCGv t0;
    TCGv v1_t;
    TCGv v2_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_tl(t0, v1);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
15654 case OPC_SHLL_QB_DSP
:
15656 op2
= MASK_SHLL_QB(ctx
->opcode
);
15660 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15664 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15668 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15672 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15674 case OPC_SHLL_S_PH
:
15676 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15678 case OPC_SHLLV_S_PH
:
15680 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15684 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15686 case OPC_SHLLV_S_W
:
15688 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15692 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15696 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15700 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15704 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15708 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15710 case OPC_SHRA_R_QB
:
15712 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15716 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15718 case OPC_SHRAV_R_QB
:
15720 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15724 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15726 case OPC_SHRA_R_PH
:
15728 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15732 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15734 case OPC_SHRAV_R_PH
:
15736 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15740 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15742 case OPC_SHRAV_R_W
:
15744 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15746 default: /* Invalid */
15747 MIPS_INVAL("MASK SHLL.QB");
15748 generate_exception_end(ctx
, EXCP_RI
);
15753 #ifdef TARGET_MIPS64
15754 case OPC_SHLL_OB_DSP
:
15755 op2
= MASK_SHLL_OB(ctx
->opcode
);
15759 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15763 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15765 case OPC_SHLL_S_PW
:
15767 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15769 case OPC_SHLLV_S_PW
:
15771 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15775 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15779 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15783 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15787 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15789 case OPC_SHLL_S_QH
:
15791 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15793 case OPC_SHLLV_S_QH
:
15795 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15799 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15803 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15805 case OPC_SHRA_R_OB
:
15807 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15809 case OPC_SHRAV_R_OB
:
15811 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15815 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15819 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15821 case OPC_SHRA_R_PW
:
15823 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15825 case OPC_SHRAV_R_PW
:
15827 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15831 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15835 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15837 case OPC_SHRA_R_QH
:
15839 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15841 case OPC_SHRAV_R_QH
:
15843 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15847 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15851 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15855 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15859 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15861 default: /* Invalid */
15862 MIPS_INVAL("MASK SHLL.OB");
15863 generate_exception_end(ctx
, EXCP_RI
);
15871 tcg_temp_free(v1_t
);
15872 tcg_temp_free(v2_t
);
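/*
 * DSP ASE multiply and dot-product group.  Most of these helpers target a
 * HI/LO accumulator pair selected by 'ret' rather than a GPR, which is why
 * the result-register NOP check is applied only when check_ret is set.
 */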
static void gen_mipsdsp_multiply(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                 int ret, int v1, int v2, int check_ret)
{
    TCGv_i32 t0;
    TCGv v1_t;
    TCGv v2_t;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new_i32();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_i32(t0, ret);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    switch (op1) {
    /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
     * the same mask and op1. */
    case OPC_MULT_G_2E:
15902 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15905 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15908 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15910 case OPC_MULQ_RS_W
:
15911 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15915 case OPC_DPA_W_PH_DSP
:
15917 case OPC_DPAU_H_QBL
:
15919 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15921 case OPC_DPAU_H_QBR
:
15923 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15925 case OPC_DPSU_H_QBL
:
15927 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15929 case OPC_DPSU_H_QBR
:
15931 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15935 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15937 case OPC_DPAX_W_PH
:
15939 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15941 case OPC_DPAQ_S_W_PH
:
15943 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15945 case OPC_DPAQX_S_W_PH
:
15947 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15949 case OPC_DPAQX_SA_W_PH
:
15951 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15955 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15957 case OPC_DPSX_W_PH
:
15959 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15961 case OPC_DPSQ_S_W_PH
:
15963 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15965 case OPC_DPSQX_S_W_PH
:
15967 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15969 case OPC_DPSQX_SA_W_PH
:
15971 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15973 case OPC_MULSAQ_S_W_PH
:
15975 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15977 case OPC_DPAQ_SA_L_W
:
15979 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15981 case OPC_DPSQ_SA_L_W
:
15983 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15985 case OPC_MAQ_S_W_PHL
:
15987 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15989 case OPC_MAQ_S_W_PHR
:
15991 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15993 case OPC_MAQ_SA_W_PHL
:
15995 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15997 case OPC_MAQ_SA_W_PHR
:
15999 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16001 case OPC_MULSA_W_PH
:
16003 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16007 #ifdef TARGET_MIPS64
16008 case OPC_DPAQ_W_QH_DSP
:
16010 int ac
= ret
& 0x03;
16011 tcg_gen_movi_i32(t0
, ac
);
16016 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16020 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16024 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16028 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16032 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16034 case OPC_DPAQ_S_W_QH
:
16036 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16038 case OPC_DPAQ_SA_L_PW
:
16040 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16042 case OPC_DPAU_H_OBL
:
16044 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16046 case OPC_DPAU_H_OBR
:
16048 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16052 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16054 case OPC_DPSQ_S_W_QH
:
16056 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16058 case OPC_DPSQ_SA_L_PW
:
16060 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16062 case OPC_DPSU_H_OBL
:
16064 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16066 case OPC_DPSU_H_OBR
:
16068 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16070 case OPC_MAQ_S_L_PWL
:
16072 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16074 case OPC_MAQ_S_L_PWR
:
16076 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16078 case OPC_MAQ_S_W_QHLL
:
16080 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16082 case OPC_MAQ_SA_W_QHLL
:
16084 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16086 case OPC_MAQ_S_W_QHLR
:
16088 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16090 case OPC_MAQ_SA_W_QHLR
:
16092 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16094 case OPC_MAQ_S_W_QHRL
:
16096 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16098 case OPC_MAQ_SA_W_QHRL
:
16100 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16102 case OPC_MAQ_S_W_QHRR
:
16104 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16106 case OPC_MAQ_SA_W_QHRR
:
16108 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16110 case OPC_MULSAQ_S_L_PW
:
16112 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16114 case OPC_MULSAQ_S_W_QH
:
16116 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16122 case OPC_ADDU_QB_DSP
:
16124 case OPC_MULEU_S_PH_QBL
:
16126 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16128 case OPC_MULEU_S_PH_QBR
:
16130 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16132 case OPC_MULQ_RS_PH
:
16134 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16136 case OPC_MULEQ_S_W_PHL
:
16138 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16140 case OPC_MULEQ_S_W_PHR
:
16142 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16144 case OPC_MULQ_S_PH
:
16146 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16150 #ifdef TARGET_MIPS64
16151 case OPC_ADDU_OB_DSP
:
16153 case OPC_MULEQ_S_PW_QHL
:
16155 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16157 case OPC_MULEQ_S_PW_QHR
:
16159 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16161 case OPC_MULEU_S_QH_OBL
:
16163 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16165 case OPC_MULEU_S_QH_OBR
:
16167 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16169 case OPC_MULQ_RS_QH
:
16171 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16178 tcg_temp_free_i32(t0
);
16179 tcg_temp_free(v1_t
);
16180 tcg_temp_free(v2_t
);
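/*
 * DSP ASE bit/field instructions (BITREV and the REPL/REPLV byte and
 * halfword replication forms): immediates and register values are
 * replicated across the lanes of the destination register.
 */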
static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int val)
{
    int16_t imm;
    TCGv t0;
    TCGv val_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    val_t = tcg_temp_new();
    gen_load_gpr(val_t, val);

    switch (op1) {
    case OPC_ABSQ_S_PH_DSP:
16204 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16209 target_long result
;
16210 imm
= (ctx
->opcode
>> 16) & 0xFF;
16211 result
= (uint32_t)imm
<< 24 |
16212 (uint32_t)imm
<< 16 |
16213 (uint32_t)imm
<< 8 |
16215 result
= (int32_t)result
;
16216 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16221 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16222 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16223 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16224 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16225 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16226 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16231 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16232 imm
= (int16_t)(imm
<< 6) >> 6;
16233 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16234 (target_long
)((int32_t)imm
<< 16 | \
16240 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16241 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16242 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16243 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16247 #ifdef TARGET_MIPS64
16248 case OPC_ABSQ_S_QH_DSP
:
16255 imm
= (ctx
->opcode
>> 16) & 0xFF;
16256 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16257 temp
= (temp
<< 16) | temp
;
16258 temp
= (temp
<< 32) | temp
;
16259 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16267 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16268 imm
= (int16_t)(imm
<< 6) >> 6;
16269 temp
= ((target_long
)imm
<< 32) \
16270 | ((target_long
)imm
& 0xFFFFFFFF);
16271 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16279 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16280 imm
= (int16_t)(imm
<< 6) >> 6;
16282 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16283 ((uint64_t)(uint16_t)imm
<< 32) |
16284 ((uint64_t)(uint16_t)imm
<< 16) |
16285 (uint64_t)(uint16_t)imm
;
16286 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16291 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16292 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16293 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16294 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16295 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16296 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16297 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16301 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16302 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16303 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16307 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16308 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16309 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16310 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16311 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16318 tcg_temp_free(val_t
);
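/*
 * DSP ASE compare and pick group: the CMP and CMPU helpers record per-lane
 * results in DSPControl, the CMPG and CMPGDU forms write a bit mask to
 * GPR[ret], and the PICK and PACKRL forms assemble a result from v1 and v2.
 */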
static void gen_mipsdsp_add_cmp_pick(DisasContext *ctx,
                                     uint32_t op1, uint32_t op2,
                                     int ret, int v1, int v2, int check_ret)
{
    TCGv t1;
    TCGv v1_t;
    TCGv v2_t;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    switch (op1) {
    case OPC_CMPU_EQ_QB_DSP:
16344 case OPC_CMPU_EQ_QB
:
16346 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16348 case OPC_CMPU_LT_QB
:
16350 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16352 case OPC_CMPU_LE_QB
:
16354 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16356 case OPC_CMPGU_EQ_QB
:
16358 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16360 case OPC_CMPGU_LT_QB
:
16362 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16364 case OPC_CMPGU_LE_QB
:
16366 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16368 case OPC_CMPGDU_EQ_QB
:
16370 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16371 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16372 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16373 tcg_gen_shli_tl(t1
, t1
, 24);
16374 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16376 case OPC_CMPGDU_LT_QB
:
16378 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16379 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16380 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16381 tcg_gen_shli_tl(t1
, t1
, 24);
16382 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16384 case OPC_CMPGDU_LE_QB
:
16386 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16387 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16388 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16389 tcg_gen_shli_tl(t1
, t1
, 24);
16390 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16392 case OPC_CMP_EQ_PH
:
16394 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16396 case OPC_CMP_LT_PH
:
16398 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16400 case OPC_CMP_LE_PH
:
16402 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16406 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16410 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16412 case OPC_PACKRL_PH
:
16414 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16418 #ifdef TARGET_MIPS64
16419 case OPC_CMPU_EQ_OB_DSP
:
16421 case OPC_CMP_EQ_PW
:
16423 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16425 case OPC_CMP_LT_PW
:
16427 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16429 case OPC_CMP_LE_PW
:
16431 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16433 case OPC_CMP_EQ_QH
:
16435 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16437 case OPC_CMP_LT_QH
:
16439 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16441 case OPC_CMP_LE_QH
:
16443 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16445 case OPC_CMPGDU_EQ_OB
:
16447 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16449 case OPC_CMPGDU_LT_OB
:
16451 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16453 case OPC_CMPGDU_LE_OB
:
16455 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16457 case OPC_CMPGU_EQ_OB
:
16459 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16461 case OPC_CMPGU_LT_OB
:
16463 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16465 case OPC_CMPGU_LE_OB
:
16467 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16469 case OPC_CMPU_EQ_OB
:
16471 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16473 case OPC_CMPU_LT_OB
:
16475 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16477 case OPC_CMPU_LE_OB
:
16479 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16481 case OPC_PACKRL_PW
:
16483 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16487 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16491 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16495 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16503 tcg_temp_free(v1_t
);
16504 tcg_temp_free(v2_t
);
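/*
 * APPEND/PREPEND/BALIGN (and the 64-bit DAPPEND group): GPR[rt] is shifted
 * and bits taken from GPR[rs] are spliced in, controlled by the 'sa' field.
 */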
static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
                               uint32_t op1, int rt, int rs, int sa)
{
    TCGv t0;

    check_dspr2(ctx);

    if (rt == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);

    switch (op1) {
16523 case OPC_APPEND_DSP
:
16524 switch (MASK_APPEND(ctx
->opcode
)) {
16527 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16529 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16533 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16534 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16535 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16536 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16538 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16542 if (sa
!= 0 && sa
!= 2) {
16543 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16544 tcg_gen_ext32u_tl(t0
, t0
);
16545 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16546 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16548 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16550 default: /* Invalid */
16551 MIPS_INVAL("MASK APPEND");
16552 generate_exception_end(ctx
, EXCP_RI
);
16556 #ifdef TARGET_MIPS64
16557 case OPC_DAPPEND_DSP
:
16558 switch (MASK_DAPPEND(ctx
->opcode
)) {
16561 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16565 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16566 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16567 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16571 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16572 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16573 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16578 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16579 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16580 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16581 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16584 default: /* Invalid */
16585 MIPS_INVAL("MASK DAPPEND");
16586 generate_exception_end(ctx
, EXCP_RI
);
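/*
 * DSP ASE accumulator access (EXTR, EXTP, SHILO, MTHLIP, WRDSP, RDDSP and
 * their 64-bit counterparts): moves data between the DSP accumulators,
 * DSPControl and the GPRs, mostly through helper calls.
 */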
static void gen_mipsdsp_accinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int v1, int v2, int check_ret)
{
    int16_t imm;
    TCGv t0;
    TCGv t1;
    TCGv v1_t;
    TCGv v2_t;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    switch (op1) {
    case OPC_EXTR_W_DSP:
16623 tcg_gen_movi_tl(t0
, v2
);
16624 tcg_gen_movi_tl(t1
, v1
);
16625 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16628 tcg_gen_movi_tl(t0
, v2
);
16629 tcg_gen_movi_tl(t1
, v1
);
16630 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16632 case OPC_EXTR_RS_W
:
16633 tcg_gen_movi_tl(t0
, v2
);
16634 tcg_gen_movi_tl(t1
, v1
);
16635 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16638 tcg_gen_movi_tl(t0
, v2
);
16639 tcg_gen_movi_tl(t1
, v1
);
16640 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16642 case OPC_EXTRV_S_H
:
16643 tcg_gen_movi_tl(t0
, v2
);
16644 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16647 tcg_gen_movi_tl(t0
, v2
);
16648 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16650 case OPC_EXTRV_R_W
:
16651 tcg_gen_movi_tl(t0
, v2
);
16652 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16654 case OPC_EXTRV_RS_W
:
16655 tcg_gen_movi_tl(t0
, v2
);
16656 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16659 tcg_gen_movi_tl(t0
, v2
);
16660 tcg_gen_movi_tl(t1
, v1
);
16661 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16664 tcg_gen_movi_tl(t0
, v2
);
16665 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16668 tcg_gen_movi_tl(t0
, v2
);
16669 tcg_gen_movi_tl(t1
, v1
);
16670 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16673 tcg_gen_movi_tl(t0
, v2
);
16674 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16677 imm
= (ctx
->opcode
>> 20) & 0x3F;
16678 tcg_gen_movi_tl(t0
, ret
);
16679 tcg_gen_movi_tl(t1
, imm
);
16680 gen_helper_shilo(t0
, t1
, cpu_env
);
16683 tcg_gen_movi_tl(t0
, ret
);
16684 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16687 tcg_gen_movi_tl(t0
, ret
);
16688 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16691 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16692 tcg_gen_movi_tl(t0
, imm
);
16693 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16696 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16697 tcg_gen_movi_tl(t0
, imm
);
16698 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16702 #ifdef TARGET_MIPS64
16703 case OPC_DEXTR_W_DSP
:
16707 tcg_gen_movi_tl(t0
, ret
);
16708 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16712 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16713 int ac
= (ctx
->opcode
>> 11) & 0x03;
16714 tcg_gen_movi_tl(t0
, shift
);
16715 tcg_gen_movi_tl(t1
, ac
);
16716 gen_helper_dshilo(t0
, t1
, cpu_env
);
16721 int ac
= (ctx
->opcode
>> 11) & 0x03;
16722 tcg_gen_movi_tl(t0
, ac
);
16723 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16727 tcg_gen_movi_tl(t0
, v2
);
16728 tcg_gen_movi_tl(t1
, v1
);
16730 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16733 tcg_gen_movi_tl(t0
, v2
);
16734 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16737 tcg_gen_movi_tl(t0
, v2
);
16738 tcg_gen_movi_tl(t1
, v1
);
16739 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16742 tcg_gen_movi_tl(t0
, v2
);
16743 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16746 tcg_gen_movi_tl(t0
, v2
);
16747 tcg_gen_movi_tl(t1
, v1
);
16748 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16750 case OPC_DEXTR_R_L
:
16751 tcg_gen_movi_tl(t0
, v2
);
16752 tcg_gen_movi_tl(t1
, v1
);
16753 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16755 case OPC_DEXTR_RS_L
:
16756 tcg_gen_movi_tl(t0
, v2
);
16757 tcg_gen_movi_tl(t1
, v1
);
16758 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16761 tcg_gen_movi_tl(t0
, v2
);
16762 tcg_gen_movi_tl(t1
, v1
);
16763 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16765 case OPC_DEXTR_R_W
:
16766 tcg_gen_movi_tl(t0
, v2
);
16767 tcg_gen_movi_tl(t1
, v1
);
16768 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16770 case OPC_DEXTR_RS_W
:
16771 tcg_gen_movi_tl(t0
, v2
);
16772 tcg_gen_movi_tl(t1
, v1
);
16773 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16775 case OPC_DEXTR_S_H
:
16776 tcg_gen_movi_tl(t0
, v2
);
16777 tcg_gen_movi_tl(t1
, v1
);
16778 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16780 case OPC_DEXTRV_S_H
:
16781 tcg_gen_movi_tl(t0
, v2
);
16782 tcg_gen_movi_tl(t1
, v1
);
16783 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16786 tcg_gen_movi_tl(t0
, v2
);
16787 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16789 case OPC_DEXTRV_R_L
:
16790 tcg_gen_movi_tl(t0
, v2
);
16791 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16793 case OPC_DEXTRV_RS_L
:
16794 tcg_gen_movi_tl(t0
, v2
);
16795 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16798 tcg_gen_movi_tl(t0
, v2
);
16799 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16801 case OPC_DEXTRV_R_W
:
16802 tcg_gen_movi_tl(t0
, v2
);
16803 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16805 case OPC_DEXTRV_RS_W
:
16806 tcg_gen_movi_tl(t0
, v2
);
16807 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16816 tcg_temp_free(v1_t
);
16817 tcg_temp_free(v2_t
);
/* End MIPSDSP functions. */
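/*
 * SPECIAL opcode decoding is split three ways below:
 * decode_opc_special_r6() handles the Release 6 encodings,
 * decode_opc_special_legacy() the pre-R6 ones, and decode_opc_special()
 * dispatches between them based on ctx->insn_flags.
 */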
static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
    case OPC_MULT ... OPC_DIVU:
        op2 = MASK_R6_MULDIV(ctx->opcode);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
        gen_cond_move(ctx, op1, rd, rs, rt);
        if (rt == 0 && sa == 1) {
            /* Major opcode and function field is shared with preR6 MFHI/MTHI.
               We need additionally to check other fields */
            gen_cl(ctx, op1, rd, rs);
            generate_exception_end(ctx, EXCP_RI);
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
            if (ctx->hflags & MIPS_HFLAG_SBRI) {
                generate_exception_end(ctx, EXCP_RI);
                generate_exception_end(ctx, EXCP_DBp);
#if defined(TARGET_MIPS64)
        check_mips_64(ctx);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        if (rt == 0 && sa == 1) {
            /* Major opcode and function field is shared with preR6 MFHI/MTHI.
               We need additionally to check other fields */
            check_mips_64(ctx);
            gen_cl(ctx, op1, rd, rs);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DMULT ... OPC_DDIVU:
        op2 = MASK_R6_MULDIV(ctx->opcode);
            check_mips_64(ctx);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
    default:            /* Invalid */
        MIPS_INVAL("special_r6");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special_legacy(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    case OPC_MOVN:         /* Conditional move */
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32 |
                   INSN_LOONGSON2E | INSN_LOONGSON2F);
        gen_cond_move(ctx, op1, rd, rs, rt);
    case OPC_MFHI:          /* Move from HI/LO */
        gen_HILO(ctx, op1, rs & 3, rd);
    case OPC_MTLO:          /* Move to HI/LO */
        gen_HILO(ctx, op1, rd & 3, rs);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
                      (ctx->opcode >> 16) & 1);
            generate_exception_err(ctx, EXCP_CpU, 1);
            check_insn(ctx, INSN_VR54XX);
            op1 = MASK_MUL_VR54XX(ctx->opcode);
            gen_mul_vr54xx(ctx, op1, rd, rs, rt);
            gen_muldiv(ctx, op1, rd & 3, rs, rt);
        gen_muldiv(ctx, op1, 0, rs, rt);
#if defined(TARGET_MIPS64)
    case OPC_DMULT ... OPC_DDIVU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, op1, 0, rs, rt);
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
#ifdef MIPS_STRICT_STANDARD
        MIPS_INVAL("SPIM");
        generate_exception_end(ctx, EXCP_RI);
        /* Implemented as RI exception for now. */
        MIPS_INVAL("spim (unofficial)");
        generate_exception_end(ctx, EXCP_RI);
    default:            /* Invalid */
        MIPS_INVAL("special_legacy");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    case OPC_SLL:          /* Shift with immediate */
        if (sa == 5 && rd == 0 &&
            rs == 0 && rt == 0) { /* PAUSE */
            if ((ctx->insn_flags & ISA_MIPS32R6) &&
                (ctx->hflags & MIPS_HFLAG_BMASK)) {
                generate_exception_end(ctx, EXCP_RI);
        gen_shift_imm(ctx, op1, rd, rt, sa);
        switch ((ctx->opcode >> 21) & 0x1f) {
            /* rotr is decoded as srl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            gen_shift_imm(ctx, op1, rd, rt, sa);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ADD ... OPC_SUBU:
        gen_arith(ctx, op1, rd, rs, rt);
    case OPC_SLLV:         /* Shifts */
        gen_shift(ctx, op1, rd, rs, rt);
        switch ((ctx->opcode >> 6) & 0x1f) {
            /* rotrv is decoded as srlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            gen_shift(ctx, op1, rd, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_SLT:          /* Set on less than */
        gen_slt(ctx, op1, rd, rs, rt);
    case OPC_AND:          /* Logic*/
        gen_logic(ctx, op1, rd, rs, rt);
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
    case OPC_TGE ... OPC_TEQ: /* Traps */
        check_insn(ctx, ISA_MIPS2);
        gen_trap(ctx, op1, rs, rt, -1);
    case OPC_LSA: /* OPC_PMON */
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
            /* Pmon entry point, also R4010 selsl */
#ifdef MIPS_STRICT_STANDARD
            MIPS_INVAL("PMON / selsl");
            generate_exception_end(ctx, EXCP_RI);
            gen_helper_0e0i(pmon, sa);
        generate_exception_end(ctx, EXCP_SYSCALL);
        generate_exception_end(ctx, EXCP_BREAK);
        check_insn(ctx, ISA_MIPS2);
        /* Treat as NOP. */
#if defined(TARGET_MIPS64)
    /* MIPS64 specific opcodes */
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(ctx, op1, rd, rt, sa);
        switch ((ctx->opcode >> 21) & 0x1f) {
            /* drotr is decoded as dsrl on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            generate_exception_end(ctx, EXCP_RI);
        switch ((ctx->opcode >> 21) & 0x1f) {
            /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(ctx, op1, rd, rt, sa);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DADD ... OPC_DSUBU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith(ctx, op1, rd, rs, rt);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(ctx, op1, rd, rs, rt);
        switch ((ctx->opcode >> 6) & 0x1f) {
            /* drotrv is decoded as dsrlv on non-R2 CPUs */
            if (ctx->insn_flags & ISA_MIPS32R2) {
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift(ctx, op1, rd, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
        if ((ctx->insn_flags & ISA_MIPS32R6) ||
            (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
            decode_opc_special_r6(env, ctx);
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special_r6(env, ctx);
            decode_opc_special_legacy(env, ctx);
static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
    check_insn_opc_removed(ctx, ISA_MIPS32R6);

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL2(ctx->opcode);
    case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
    case OPC_MSUB ... OPC_MSUBU:
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, op1, rd & 3, rs, rt);
        gen_arith(ctx, op1, rd, rs, rt);
    case OPC_DIVU_G_2F:
    case OPC_MULT_G_2F:
    case OPC_MULTU_G_2F:
    case OPC_MODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        check_insn(ctx, ISA_MIPS32);
        gen_cl(ctx, op1, rd, rs);
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
            /* XXX: not clear which exception should be raised
             *      when in debug mode...
             */
            check_insn(ctx, ISA_MIPS32);
            generate_exception_end(ctx, EXCP_DBp);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS64);
        check_mips_64(ctx);
        gen_cl(ctx, op1, rd, rs);
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2F:
    case OPC_DDIV_G_2F:
    case OPC_DDIVU_G_2F:
    case OPC_DMOD_G_2F:
    case OPC_DMODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
    default:            /* Invalid */
        MIPS_INVAL("special2_legacy");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special3_r6(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode >> 7;

    op1 = MASK_SPECIAL3(ctx->opcode);
            /* hint codes 24-31 are reserved and signal RI */
            generate_exception_end(ctx, EXCP_RI);
        /* Treat as NOP. */
        check_cp0_enabled(ctx);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        gen_st_cond(ctx, op1, rt, rs, imm);
        gen_ld(ctx, op1, rt, rs, imm);
        /* Treat as NOP. */
        op2 = MASK_BSHFL(ctx->opcode);
        case OPC_ALIGN ... OPC_ALIGN_END:
            gen_align(ctx, OPC_ALIGN, rd, rs, rt, sa & 3);
            gen_bitswap(ctx, op2, rd, rt);
#if defined(TARGET_MIPS64)
        gen_st_cond(ctx, op1, rt, rs, imm);
        gen_ld(ctx, op1, rt, rs, imm);
        check_mips_64(ctx);
        /* Treat as NOP. */
        op2 = MASK_DBSHFL(ctx->opcode);
        case OPC_DALIGN ... OPC_DALIGN_END:
            gen_align(ctx, OPC_DALIGN, rd, rs, rt, sa & 7);
            gen_bitswap(ctx, op2, rd, rt);
    default:            /* Invalid */
        MIPS_INVAL("special3_r6");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special3_legacy(CPUMIPSState *env, DisasContext *ctx)
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL3(ctx->opcode);
    case OPC_DIV_G_2E ... OPC_DIVU_G_2E:
    case OPC_MOD_G_2E ... OPC_MODU_G_2E:
    case OPC_MULT_G_2E ... OPC_MULTU_G_2E:
        /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
         * the same mask and op1. */
        if ((ctx->insn_flags & ASE_DSPR2) && (op1 == OPC_MULT_G_2E)) {
            op2 = MASK_ADDUH_QB(ctx->opcode);
            case OPC_ADDUH_R_QB:
            case OPC_ADDQH_R_PH:
            case OPC_ADDQH_R_W:
            case OPC_SUBUH_R_QB:
            case OPC_SUBQH_R_PH:
            case OPC_SUBQH_R_W:
                gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            case OPC_MULQ_RS_W:
                gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
                MIPS_INVAL("MASK ADDUH.QB");
                generate_exception_end(ctx, EXCP_RI);
        } else if (ctx->insn_flags & INSN_LOONGSON2E) {
            gen_loongson_integer(ctx, op1, rd, rs, rt);
            generate_exception_end(ctx, EXCP_RI);
        op2 = MASK_LX(ctx->opcode);
#if defined(TARGET_MIPS64)
            gen_mipsdsp_ld(ctx, op2, rd, rs, rt);
        default:            /* Invalid */
            MIPS_INVAL("MASK LX");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ABSQ_S_PH_DSP:
        op2 = MASK_ABSQ_S_PH(ctx->opcode);
        case OPC_ABSQ_S_QB:
        case OPC_ABSQ_S_PH:
        case OPC_PRECEQ_W_PHL:
        case OPC_PRECEQ_W_PHR:
        case OPC_PRECEQU_PH_QBL:
        case OPC_PRECEQU_PH_QBR:
        case OPC_PRECEQU_PH_QBLA:
        case OPC_PRECEQU_PH_QBRA:
        case OPC_PRECEU_PH_QBL:
        case OPC_PRECEU_PH_QBR:
        case OPC_PRECEU_PH_QBLA:
        case OPC_PRECEU_PH_QBRA:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            gen_mipsdsp_bitinsn(ctx, op1, op2, rd, rt);
            MIPS_INVAL("MASK ABSQ_S.PH");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ADDU_QB_DSP:
        op2 = MASK_ADDU_QB(ctx->opcode);
        case OPC_ADDQ_S_PH:
        case OPC_ADDU_S_QB:
        case OPC_ADDU_S_PH:
        case OPC_SUBQ_S_PH:
        case OPC_SUBU_S_QB:
        case OPC_SUBU_S_PH:
        case OPC_RADDU_W_QB:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_MULEU_S_PH_QBL:
        case OPC_MULEU_S_PH_QBR:
        case OPC_MULQ_RS_PH:
        case OPC_MULEQ_S_W_PHL:
        case OPC_MULEQ_S_W_PHR:
        case OPC_MULQ_S_PH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
        default:            /* Invalid */
            MIPS_INVAL("MASK ADDU.QB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_CMPU_EQ_QB_DSP:
        op2 = MASK_CMPU_EQ_QB(ctx->opcode);
        case OPC_PRECR_SRA_PH_W:
        case OPC_PRECR_SRA_R_PH_W:
            gen_mipsdsp_arith(ctx, op1, op2, rt, rs, rd);
        case OPC_PRECR_QB_PH:
        case OPC_PRECRQ_QB_PH:
        case OPC_PRECRQ_PH_W:
        case OPC_PRECRQ_RS_PH_W:
        case OPC_PRECRQU_S_QB_PH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_CMPU_EQ_QB:
        case OPC_CMPU_LT_QB:
        case OPC_CMPU_LE_QB:
        case OPC_CMP_EQ_PH:
        case OPC_CMP_LT_PH:
        case OPC_CMP_LE_PH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_CMPGU_EQ_QB:
        case OPC_CMPGU_LT_QB:
        case OPC_CMPGU_LE_QB:
        case OPC_CMPGDU_EQ_QB:
        case OPC_CMPGDU_LT_QB:
        case OPC_CMPGDU_LE_QB:
        case OPC_PACKRL_PH:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
        default:            /* Invalid */
            MIPS_INVAL("MASK CMPU.EQ.QB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_SHLL_QB_DSP:
        gen_mipsdsp_shift(ctx, op1, rd, rs, rt);
    case OPC_DPA_W_PH_DSP:
        op2 = MASK_DPA_W_PH(ctx->opcode);
        case OPC_DPAU_H_QBL:
        case OPC_DPAU_H_QBR:
        case OPC_DPSU_H_QBL:
        case OPC_DPSU_H_QBR:
        case OPC_DPAX_W_PH:
        case OPC_DPAQ_S_W_PH:
        case OPC_DPAQX_S_W_PH:
        case OPC_DPAQX_SA_W_PH:
        case OPC_DPSX_W_PH:
        case OPC_DPSQ_S_W_PH:
        case OPC_DPSQX_S_W_PH:
        case OPC_DPSQX_SA_W_PH:
        case OPC_MULSAQ_S_W_PH:
        case OPC_DPAQ_SA_L_W:
        case OPC_DPSQ_SA_L_W:
        case OPC_MAQ_S_W_PHL:
        case OPC_MAQ_S_W_PHR:
        case OPC_MAQ_SA_W_PHL:
        case OPC_MAQ_SA_W_PHR:
        case OPC_MULSA_W_PH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        default:            /* Invalid */
            MIPS_INVAL("MASK DPAW.PH");
            generate_exception_end(ctx, EXCP_RI);
        op2 = MASK_INSV(ctx->opcode);
            t0 = tcg_temp_new();
            t1 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);

            gen_helper_insv(cpu_gpr[rt], cpu_env, t1, t0);
        default:            /* Invalid */
            MIPS_INVAL("MASK INSV");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_APPEND_DSP:
        gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
    case OPC_EXTR_W_DSP:
        op2 = MASK_EXTR_W(ctx->opcode);
        case OPC_EXTR_RS_W:
        case OPC_EXTRV_S_H:
        case OPC_EXTRV_R_W:
        case OPC_EXTRV_RS_W:
            gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
        default:            /* Invalid */
            MIPS_INVAL("MASK EXTR.W");
            generate_exception_end(ctx, EXCP_RI);
#if defined(TARGET_MIPS64)
    case OPC_DDIV_G_2E ... OPC_DDIVU_G_2E:
    case OPC_DMULT_G_2E ... OPC_DMULTU_G_2E:
    case OPC_DMOD_G_2E ... OPC_DMODU_G_2E:
        check_insn(ctx, INSN_LOONGSON2E);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
    case OPC_ABSQ_S_QH_DSP:
        op2 = MASK_ABSQ_S_QH(ctx->opcode);
        case OPC_PRECEQ_L_PWL:
        case OPC_PRECEQ_L_PWR:
        case OPC_PRECEQ_PW_QHL:
        case OPC_PRECEQ_PW_QHR:
        case OPC_PRECEQ_PW_QHLA:
        case OPC_PRECEQ_PW_QHRA:
        case OPC_PRECEQU_QH_OBL:
        case OPC_PRECEQU_QH_OBR:
        case OPC_PRECEQU_QH_OBLA:
        case OPC_PRECEQU_QH_OBRA:
        case OPC_PRECEU_QH_OBL:
        case OPC_PRECEU_QH_OBR:
        case OPC_PRECEU_QH_OBLA:
        case OPC_PRECEU_QH_OBRA:
        case OPC_ABSQ_S_OB:
        case OPC_ABSQ_S_PW:
        case OPC_ABSQ_S_QH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
            gen_mipsdsp_bitinsn(ctx, op1, op2, rd, rt);
        default:            /* Invalid */
            MIPS_INVAL("MASK ABSQ_S.QH");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_ADDU_OB_DSP:
        op2 = MASK_ADDU_OB(ctx->opcode);
        case OPC_RADDU_L_OB:
        case OPC_SUBQ_S_PW:
        case OPC_SUBQ_S_QH:
        case OPC_SUBU_S_OB:
        case OPC_SUBU_S_QH:
        case OPC_SUBUH_R_OB:
        case OPC_ADDQ_S_PW:
        case OPC_ADDQ_S_QH:
        case OPC_ADDU_S_OB:
        case OPC_ADDU_S_QH:
        case OPC_ADDUH_R_OB:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_MULEQ_S_PW_QHL:
        case OPC_MULEQ_S_PW_QHR:
        case OPC_MULEU_S_QH_OBL:
        case OPC_MULEU_S_QH_OBR:
        case OPC_MULQ_RS_QH:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
        default:            /* Invalid */
            MIPS_INVAL("MASK ADDU.OB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_CMPU_EQ_OB_DSP:
        op2 = MASK_CMPU_EQ_OB(ctx->opcode);
        case OPC_PRECR_SRA_QH_PW:
        case OPC_PRECR_SRA_R_QH_PW:
            /* Return value is rt. */
            gen_mipsdsp_arith(ctx, op1, op2, rt, rs, rd);
        case OPC_PRECR_OB_QH:
        case OPC_PRECRQ_OB_QH:
        case OPC_PRECRQ_PW_L:
        case OPC_PRECRQ_QH_PW:
        case OPC_PRECRQ_RS_QH_PW:
        case OPC_PRECRQU_S_OB_QH:
            gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
        case OPC_CMPU_EQ_OB:
        case OPC_CMPU_LT_OB:
        case OPC_CMPU_LE_OB:
        case OPC_CMP_EQ_QH:
        case OPC_CMP_LT_QH:
        case OPC_CMP_LE_QH:
        case OPC_CMP_EQ_PW:
        case OPC_CMP_LT_PW:
        case OPC_CMP_LE_PW:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_CMPGDU_EQ_OB:
        case OPC_CMPGDU_LT_OB:
        case OPC_CMPGDU_LE_OB:
        case OPC_CMPGU_EQ_OB:
        case OPC_CMPGU_LT_OB:
        case OPC_CMPGU_LE_OB:
        case OPC_PACKRL_PW:
            gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
        default:            /* Invalid */
            MIPS_INVAL("MASK CMPU_EQ.OB");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DAPPEND_DSP:
        gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
    case OPC_DEXTR_W_DSP:
        op2 = MASK_DEXTR_W(ctx->opcode);
        case OPC_DEXTR_R_L:
        case OPC_DEXTR_RS_L:
        case OPC_DEXTR_R_W:
        case OPC_DEXTR_RS_W:
        case OPC_DEXTR_S_H:
        case OPC_DEXTRV_R_L:
        case OPC_DEXTRV_RS_L:
        case OPC_DEXTRV_S_H:
        case OPC_DEXTRV_R_W:
        case OPC_DEXTRV_RS_W:
            gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
            gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
        default:            /* Invalid */
            MIPS_INVAL("MASK EXTR.W");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DPAQ_W_QH_DSP:
        op2 = MASK_DPAQ_W_QH(ctx->opcode);
        case OPC_DPAU_H_OBL:
        case OPC_DPAU_H_OBR:
        case OPC_DPSU_H_OBL:
        case OPC_DPSU_H_OBR:
        case OPC_DPAQ_S_W_QH:
        case OPC_DPSQ_S_W_QH:
        case OPC_MULSAQ_S_W_QH:
        case OPC_DPAQ_SA_L_PW:
        case OPC_DPSQ_SA_L_PW:
        case OPC_MULSAQ_S_L_PW:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        case OPC_MAQ_S_W_QHLL:
        case OPC_MAQ_S_W_QHLR:
        case OPC_MAQ_S_W_QHRL:
        case OPC_MAQ_S_W_QHRR:
        case OPC_MAQ_SA_W_QHLL:
        case OPC_MAQ_SA_W_QHLR:
        case OPC_MAQ_SA_W_QHRL:
        case OPC_MAQ_SA_W_QHRR:
        case OPC_MAQ_S_L_PWL:
        case OPC_MAQ_S_L_PWR:
            gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
        default:            /* Invalid */
            MIPS_INVAL("MASK DPAQ.W.QH");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DINSV_DSP:
        op2 = MASK_INSV(ctx->opcode);
            t0 = tcg_temp_new();
            t1 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);

            gen_helper_dinsv(cpu_gpr[rt], cpu_env, t1, t0);
        default:            /* Invalid */
            MIPS_INVAL("MASK DINSV");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_SHLL_OB_DSP:
        gen_mipsdsp_shift(ctx, op1, rd, rs, rt);
    default:            /* Invalid */
        MIPS_INVAL("special3_legacy");
        generate_exception_end(ctx, EXCP_RI);
static void decode_opc_special3(CPUMIPSState *env, DisasContext *ctx)
    int rs, rt, rd, sa;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL3(ctx->opcode);
        check_insn(ctx, ISA_MIPS32R2);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        op2 = MASK_BSHFL(ctx->opcode);
        case OPC_ALIGN ... OPC_ALIGN_END:
            check_insn(ctx, ISA_MIPS32R6);
            decode_opc_special3_r6(env, ctx);
            check_insn(ctx, ISA_MIPS32R2);
            gen_bshfl(ctx, op2, rt, rd);
#if defined(TARGET_MIPS64)
    case OPC_DEXTM ... OPC_DEXT:
    case OPC_DINSM ... OPC_DINS:
        check_insn(ctx, ISA_MIPS64R2);
        check_mips_64(ctx);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        op2 = MASK_DBSHFL(ctx->opcode);
        case OPC_DALIGN ... OPC_DALIGN_END:
            check_insn(ctx, ISA_MIPS32R6);
            decode_opc_special3_r6(env, ctx);
            check_insn(ctx, ISA_MIPS64R2);
            check_mips_64(ctx);
            op2 = MASK_DBSHFL(ctx->opcode);
            gen_bshfl(ctx, op2, rt, rd);
        gen_rdhwr(ctx, rt, rd, extract32(ctx->opcode, 6, 3));
        check_insn(ctx, ASE_MT);
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);
            gen_helper_fork(t0, t1);
        check_insn(ctx, ASE_MT);
            TCGv t0 = tcg_temp_new();

            gen_load_gpr(t0, rs);
            gen_helper_yield(t0, cpu_env, t0);
            gen_store_gpr(t0, rd);
        if (ctx->insn_flags & ISA_MIPS32R6) {
            decode_opc_special3_r6(env, ctx);
            decode_opc_special3_legacy(env, ctx);
/* MIPS SIMD Architecture (MSA) */
static inline int check_msa_access(DisasContext *ctx)
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            generate_exception_end(ctx, EXCP_RI);
static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
    tcg_gen_subi_i64(t0, msa_wr_d[wt<<1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt<<1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt<<1)+1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt<<1)+1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;

    check_msa_access(ctx);

    if (ctx->insn_flags & ISA_MIPS32R6 && ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_or_i64(t0, msa_wr_d[wt<<1], msa_wr_d[(wt<<1)+1]);
            tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                    TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
            tcg_gen_trunc_i64_tl(bcond, t0);
            tcg_temp_free_i64(t0);
        gen_check_zero_element(bcond, df, wt);
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
    ctx->btarget = ctx->pc + (s16 << 2) + 4;

    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
static void gen_msa_i8(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_I8(op)    (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
    uint8_t i8 = (ctx->opcode >> 16) & 0xff;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 ti8 = tcg_const_i32(i8);

    switch (MASK_MSA_I8(ctx->opcode)) {
        gen_helper_msa_andi_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_ori_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_nori_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_xori_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_bmnzi_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_bmzi_b(cpu_env, twd, tws, ti8);
        gen_helper_msa_bseli_b(cpu_env, twd, tws, ti8);
            uint8_t df = (ctx->opcode >> 24) & 0x3;
            if (df == DF_DOUBLE) {
                generate_exception_end(ctx, EXCP_RI);
                TCGv_i32 tdf = tcg_const_i32(df);
                gen_helper_msa_shf_df(cpu_env, tdf, twd, tws, ti8);
                tcg_temp_free_i32(tdf);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(ti8);
static void gen_msa_i5(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_I5(op)    (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    int8_t s5 = (int8_t) sextract32(ctx->opcode, 16, 5);
    uint8_t u5 = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tdf = tcg_const_i32(df);
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 timm = tcg_temp_new_i32();
    tcg_gen_movi_i32(timm, u5);

    switch (MASK_MSA_I5(ctx->opcode)) {
        gen_helper_msa_addvi_df(cpu_env, tdf, twd, tws, timm);
        gen_helper_msa_subvi_df(cpu_env, tdf, twd, tws, timm);
    case OPC_MAXI_S_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_maxi_s_df(cpu_env, tdf, twd, tws, timm);
    case OPC_MAXI_U_df:
        gen_helper_msa_maxi_u_df(cpu_env, tdf, twd, tws, timm);
    case OPC_MINI_S_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_mini_s_df(cpu_env, tdf, twd, tws, timm);
    case OPC_MINI_U_df:
        gen_helper_msa_mini_u_df(cpu_env, tdf, twd, tws, timm);
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_ceqi_df(cpu_env, tdf, twd, tws, timm);
    case OPC_CLTI_S_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_clti_s_df(cpu_env, tdf, twd, tws, timm);
    case OPC_CLTI_U_df:
        gen_helper_msa_clti_u_df(cpu_env, tdf, twd, tws, timm);
    case OPC_CLEI_S_df:
        tcg_gen_movi_i32(timm, s5);
        gen_helper_msa_clei_s_df(cpu_env, tdf, twd, tws, timm);
    case OPC_CLEI_U_df:
        gen_helper_msa_clei_u_df(cpu_env, tdf, twd, tws, timm);
            int32_t s10 = sextract32(ctx->opcode, 11, 10);
            tcg_gen_movi_i32(timm, s10);
            gen_helper_msa_ldi_df(cpu_env, tdf, twd, timm);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free_i32(tdf);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(timm);
static void gen_msa_bit(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_BIT(op)    (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t dfm = (ctx->opcode >> 16) & 0x7f;
    uint32_t df = 0, m = 0;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    if ((dfm & 0x40) == 0x00) {
    } else if ((dfm & 0x60) == 0x40) {
    } else if ((dfm & 0x70) == 0x60) {
    } else if ((dfm & 0x78) == 0x70) {
        generate_exception_end(ctx, EXCP_RI);

    tdf = tcg_const_i32(df);
    tm  = tcg_const_i32(m);
    twd = tcg_const_i32(wd);
    tws = tcg_const_i32(ws);

    switch (MASK_MSA_BIT(ctx->opcode)) {
        gen_helper_msa_slli_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srai_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srli_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_bclri_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_bseti_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_bnegi_df(cpu_env, tdf, twd, tws, tm);
    case OPC_BINSLI_df:
        gen_helper_msa_binsli_df(cpu_env, tdf, twd, tws, tm);
    case OPC_BINSRI_df:
        gen_helper_msa_binsri_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_sat_s_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_sat_u_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srari_df(cpu_env, tdf, twd, tws, tm);
        gen_helper_msa_srlri_df(cpu_env, tdf, twd, tws, tm);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free_i32(tdf);
    tcg_temp_free_i32(tm);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
static void gen_msa_3r(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_3R(op)    (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tdf = tcg_const_i32(df);
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);

    switch (MASK_MSA_3R(ctx->opcode)) {
        gen_helper_msa_sll_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_addv_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ceq_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_add_a_df(cpu_env, tdf, twd, tws, twt);
    case OPC_SUBS_S_df:
        gen_helper_msa_subs_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_mulv_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_sld_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_vshf_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_sra_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_subv_df(cpu_env, tdf, twd, tws, twt);
    case OPC_ADDS_A_df:
        gen_helper_msa_adds_a_df(cpu_env, tdf, twd, tws, twt);
    case OPC_SUBS_U_df:
        gen_helper_msa_subs_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_maddv_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_splat_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_srar_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_srl_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_max_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_clt_s_df(cpu_env, tdf, twd, tws, twt);
    case OPC_ADDS_S_df:
        gen_helper_msa_adds_s_df(cpu_env, tdf, twd, tws, twt);
    case OPC_SUBSUS_U_df:
        gen_helper_msa_subsus_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_msubv_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_pckev_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_srlr_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_bclr_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_max_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_clt_u_df(cpu_env, tdf, twd, tws, twt);
    case OPC_ADDS_U_df:
        gen_helper_msa_adds_u_df(cpu_env, tdf, twd, tws, twt);
    case OPC_SUBSUU_S_df:
        gen_helper_msa_subsuu_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_pckod_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_bset_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_min_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_cle_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ave_s_df(cpu_env, tdf, twd, tws, twt);
    case OPC_ASUB_S_df:
        gen_helper_msa_asub_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_div_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ilvl_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_bneg_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_min_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_cle_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ave_u_df(cpu_env, tdf, twd, tws, twt);
    case OPC_ASUB_U_df:
        gen_helper_msa_asub_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_div_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ilvr_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_binsl_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_max_a_df(cpu_env, tdf, twd, tws, twt);
    case OPC_AVER_S_df:
        gen_helper_msa_aver_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_mod_s_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ilvev_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_binsr_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_min_a_df(cpu_env, tdf, twd, tws, twt);
    case OPC_AVER_U_df:
        gen_helper_msa_aver_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_mod_u_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ilvod_df(cpu_env, tdf, twd, tws, twt);
    case OPC_DOTP_S_df:
    case OPC_DOTP_U_df:
    case OPC_DPADD_S_df:
    case OPC_DPADD_U_df:
    case OPC_DPSUB_S_df:
    case OPC_HADD_S_df:
    case OPC_DPSUB_U_df:
    case OPC_HADD_U_df:
    case OPC_HSUB_S_df:
    case OPC_HSUB_U_df:
        if (df == DF_BYTE) {
            generate_exception_end(ctx, EXCP_RI);
        switch (MASK_MSA_3R(ctx->opcode)) {
        case OPC_DOTP_S_df:
            gen_helper_msa_dotp_s_df(cpu_env, tdf, twd, tws, twt);
        case OPC_DOTP_U_df:
            gen_helper_msa_dotp_u_df(cpu_env, tdf, twd, tws, twt);
        case OPC_DPADD_S_df:
            gen_helper_msa_dpadd_s_df(cpu_env, tdf, twd, tws, twt);
        case OPC_DPADD_U_df:
            gen_helper_msa_dpadd_u_df(cpu_env, tdf, twd, tws, twt);
        case OPC_DPSUB_S_df:
            gen_helper_msa_dpsub_s_df(cpu_env, tdf, twd, tws, twt);
        case OPC_HADD_S_df:
            gen_helper_msa_hadd_s_df(cpu_env, tdf, twd, tws, twt);
        case OPC_DPSUB_U_df:
            gen_helper_msa_dpsub_u_df(cpu_env, tdf, twd, tws, twt);
        case OPC_HADD_U_df:
            gen_helper_msa_hadd_u_df(cpu_env, tdf, twd, tws, twt);
        case OPC_HSUB_S_df:
            gen_helper_msa_hsub_s_df(cpu_env, tdf, twd, tws, twt);
        case OPC_HSUB_U_df:
            gen_helper_msa_hsub_u_df(cpu_env, tdf, twd, tws, twt);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_ELM_DF3E(op)   (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
    uint8_t source = (ctx->opcode >> 11) & 0x1f;
    uint8_t dest = (ctx->opcode >> 6) & 0x1f;
    TCGv telm = tcg_temp_new();
    TCGv_i32 tsr = tcg_const_i32(source);
    TCGv_i32 tdt = tcg_const_i32(dest);

    switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
        gen_load_gpr(telm, source);
        gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
        gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
        gen_store_gpr(telm, dest);
        gen_helper_msa_move_v(cpu_env, tdt, tsr);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free(telm);
    tcg_temp_free_i32(tdt);
    tcg_temp_free_i32(tsr);
static void gen_msa_elm_df(CPUMIPSState *env, DisasContext *ctx, uint32_t df,
                           uint32_t n)
#define MASK_MSA_ELM(op)    (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tn  = tcg_const_i32(n);
    TCGv_i32 tdf = tcg_const_i32(df);

    switch (MASK_MSA_ELM(ctx->opcode)) {
        gen_helper_msa_sldi_df(cpu_env, tdf, twd, tws, tn);
    case OPC_SPLATI_df:
        gen_helper_msa_splati_df(cpu_env, tdf, twd, tws, tn);
        gen_helper_msa_insve_df(cpu_env, tdf, twd, tws, tn);
    case OPC_COPY_S_df:
    case OPC_COPY_U_df:
    case OPC_INSERT_df:
#if !defined(TARGET_MIPS64)
        /* Double format valid only for MIPS64 */
        if (df == DF_DOUBLE) {
            generate_exception_end(ctx, EXCP_RI);
        switch (MASK_MSA_ELM(ctx->opcode)) {
        case OPC_COPY_S_df:
            gen_helper_msa_copy_s_df(cpu_env, tdf, twd, tws, tn);
        case OPC_COPY_U_df:
            gen_helper_msa_copy_u_df(cpu_env, tdf, twd, tws, tn);
        case OPC_INSERT_df:
            gen_helper_msa_insert_df(cpu_env, tdf, twd, tws, tn);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(tn);
    tcg_temp_free_i32(tdf);
static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
    uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
    uint32_t df = 0, n = 0;

    if ((dfn & 0x30) == 0x00) {
    } else if ((dfn & 0x38) == 0x20) {
    } else if ((dfn & 0x3c) == 0x30) {
    } else if ((dfn & 0x3e) == 0x38) {
    } else if (dfn == 0x3E) {
        /* CTCMSA, CFCMSA, MOVE.V */
        gen_msa_elm_3e(env, ctx);
        generate_exception_end(ctx, EXCP_RI);

    gen_msa_elm_df(env, ctx, df, n);
static void gen_msa_3rf(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_3RF(op)    (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
    uint8_t df = (ctx->opcode >> 21) & 0x1;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;

    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    TCGv_i32 tdf = tcg_temp_new_i32();

    /* adjust df value for floating-point instruction */
    tcg_gen_movi_i32(tdf, df + 2);

    switch (MASK_MSA_3RF(ctx->opcode)) {
        gen_helper_msa_fcaf_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fadd_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fcun_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fsub_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fcor_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fceq_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fmul_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fcune_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fcueq_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fdiv_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fcne_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fclt_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fmadd_df(cpu_env, tdf, twd, tws, twt);
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_mul_q_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fcult_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fmsub_df(cpu_env, tdf, twd, tws, twt);
    case OPC_MADD_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_madd_q_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fcle_df(cpu_env, tdf, twd, tws, twt);
    case OPC_MSUB_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_msub_q_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fcule_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fexp2_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fsaf_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fexdo_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fsun_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fsor_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fseq_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_ftq_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fsune_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fsueq_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fsne_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fslt_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fmin_df(cpu_env, tdf, twd, tws, twt);
    case OPC_MULR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_mulr_q_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fsult_df(cpu_env, tdf, twd, tws, twt);
    case OPC_FMIN_A_df:
        gen_helper_msa_fmin_a_df(cpu_env, tdf, twd, tws, twt);
    case OPC_MADDR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_maddr_q_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fsle_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fmax_df(cpu_env, tdf, twd, tws, twt);
    case OPC_MSUBR_Q_df:
        tcg_gen_movi_i32(tdf, df + 1);
        gen_helper_msa_msubr_q_df(cpu_env, tdf, twd, tws, twt);
        gen_helper_msa_fsule_df(cpu_env, tdf, twd, tws, twt);
    case OPC_FMAX_A_df:
        gen_helper_msa_fmax_a_df(cpu_env, tdf, twd, tws, twt);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
static void gen_msa_2r(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_2R(op)     (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
                            (op & (0x7 << 18)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    uint8_t df = (ctx->opcode >> 16) & 0x3;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    TCGv_i32 tdf = tcg_const_i32(df);

    switch (MASK_MSA_2R(ctx->opcode)) {
#if !defined(TARGET_MIPS64)
        /* Double format valid only for MIPS64 */
        if (df == DF_DOUBLE) {
            generate_exception_end(ctx, EXCP_RI);
        gen_helper_msa_fill_df(cpu_env, tdf, twd, tws); /* trs */
        gen_helper_msa_pcnt_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_nloc_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_nlzc_df(cpu_env, tdf, twd, tws);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
static void gen_msa_2rf(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_2RF(op)    (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
                            (op & (0xf << 17)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    uint8_t df = (ctx->opcode >> 16) & 0x1;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);
    /* adjust df value for floating-point instruction */
    TCGv_i32 tdf = tcg_const_i32(df + 2);

    switch (MASK_MSA_2RF(ctx->opcode)) {
    case OPC_FCLASS_df:
        gen_helper_msa_fclass_df(cpu_env, tdf, twd, tws);
    case OPC_FTRUNC_S_df:
        gen_helper_msa_ftrunc_s_df(cpu_env, tdf, twd, tws);
    case OPC_FTRUNC_U_df:
        gen_helper_msa_ftrunc_u_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_fsqrt_df(cpu_env, tdf, twd, tws);
    case OPC_FRSQRT_df:
        gen_helper_msa_frsqrt_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_frcp_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_frint_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_flog2_df(cpu_env, tdf, twd, tws);
    case OPC_FEXUPL_df:
        gen_helper_msa_fexupl_df(cpu_env, tdf, twd, tws);
    case OPC_FEXUPR_df:
        gen_helper_msa_fexupr_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_ffql_df(cpu_env, tdf, twd, tws);
        gen_helper_msa_ffqr_df(cpu_env, tdf, twd, tws);
    case OPC_FTINT_S_df:
        gen_helper_msa_ftint_s_df(cpu_env, tdf, twd, tws);
    case OPC_FTINT_U_df:
        gen_helper_msa_ftint_u_df(cpu_env, tdf, twd, tws);
    case OPC_FFINT_S_df:
        gen_helper_msa_ffint_s_df(cpu_env, tdf, twd, tws);
    case OPC_FFINT_U_df:
        gen_helper_msa_ffint_u_df(cpu_env, tdf, twd, tws);

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
    tcg_temp_free_i32(tdf);
static void gen_msa_vec_v(CPUMIPSState *env, DisasContext *ctx)
#define MASK_MSA_VEC(op)    (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    uint8_t ws = (ctx->opcode >> 11) & 0x1f;
    uint8_t wd = (ctx->opcode >> 6) & 0x1f;
    TCGv_i32 twd = tcg_const_i32(wd);
    TCGv_i32 tws = tcg_const_i32(ws);
    TCGv_i32 twt = tcg_const_i32(wt);

    switch (MASK_MSA_VEC(ctx->opcode)) {
        gen_helper_msa_and_v(cpu_env, twd, tws, twt);
        gen_helper_msa_or_v(cpu_env, twd, tws, twt);
        gen_helper_msa_nor_v(cpu_env, twd, tws, twt);
        gen_helper_msa_xor_v(cpu_env, twd, tws, twt);
        gen_helper_msa_bmnz_v(cpu_env, twd, tws, twt);
        gen_helper_msa_bmz_v(cpu_env, twd, tws, twt);
        gen_helper_msa_bsel_v(cpu_env, twd, tws, twt);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);

    tcg_temp_free_i32(twd);
    tcg_temp_free_i32(tws);
    tcg_temp_free_i32(twt);
static void gen_msa_vec(CPUMIPSState *env, DisasContext *ctx)
    switch (MASK_MSA_VEC(ctx->opcode)) {
        gen_msa_vec_v(env, ctx);
        gen_msa_2r(env, ctx);
        gen_msa_2rf(env, ctx);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
static void gen_msa(CPUMIPSState *env, DisasContext *ctx)
    uint32_t opcode = ctx->opcode;
    check_insn(ctx, ASE_MSA);
    check_msa_access(ctx);

    switch (MASK_MSA_MINOR(opcode)) {
    case OPC_MSA_I8_00:
    case OPC_MSA_I8_01:
    case OPC_MSA_I8_02:
        gen_msa_i8(env, ctx);
    case OPC_MSA_I5_06:
    case OPC_MSA_I5_07:
        gen_msa_i5(env, ctx);
    case OPC_MSA_BIT_09:
    case OPC_MSA_BIT_0A:
        gen_msa_bit(env, ctx);
    case OPC_MSA_3R_0D:
    case OPC_MSA_3R_0E:
    case OPC_MSA_3R_0F:
    case OPC_MSA_3R_10:
    case OPC_MSA_3R_11:
    case OPC_MSA_3R_12:
    case OPC_MSA_3R_13:
    case OPC_MSA_3R_14:
    case OPC_MSA_3R_15:
        gen_msa_3r(env, ctx);
        gen_msa_elm(env, ctx);
    case OPC_MSA_3RF_1A:
    case OPC_MSA_3RF_1B:
    case OPC_MSA_3RF_1C:
        gen_msa_3rf(env, ctx);
        gen_msa_vec(env, ctx);
            int32_t s10 = sextract32(ctx->opcode, 16, 10);
            uint8_t rs = (ctx->opcode >> 11) & 0x1f;
            uint8_t wd = (ctx->opcode >> 6) & 0x1f;
            uint8_t df = (ctx->opcode >> 0) & 0x3;

            TCGv_i32 twd = tcg_const_i32(wd);
            TCGv taddr = tcg_temp_new();
            gen_base_offset_addr(ctx, taddr, rs, s10 << df);

            switch (MASK_MSA_MINOR(opcode)) {
                gen_helper_msa_ld_b(cpu_env, twd, taddr);
                gen_helper_msa_ld_h(cpu_env, twd, taddr);
                gen_helper_msa_ld_w(cpu_env, twd, taddr);
                gen_helper_msa_ld_d(cpu_env, twd, taddr);
                gen_helper_msa_st_b(cpu_env, twd, taddr);
                gen_helper_msa_st_h(cpu_env, twd, taddr);
                gen_helper_msa_st_w(cpu_env, twd, taddr);
                gen_helper_msa_st_d(cpu_env, twd, taddr);

            tcg_temp_free_i32(twd);
            tcg_temp_free(taddr);
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
19075 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
19078 int rs
, rt
, rd
, sa
;
19082 /* make sure instructions are on a word boundary */
19083 if (ctx
->pc
& 0x3) {
19084 env
->CP0_BadVAddr
= ctx
->pc
;
19085 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
19089 /* Handle blikely not taken case */
19090 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
19091 TCGLabel
*l1
= gen_new_label();
19093 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
19094 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
19095 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
19099 op
= MASK_OP_MAJOR(ctx
->opcode
);
19100 rs
= (ctx
->opcode
>> 21) & 0x1f;
19101 rt
= (ctx
->opcode
>> 16) & 0x1f;
19102 rd
= (ctx
->opcode
>> 11) & 0x1f;
19103 sa
= (ctx
->opcode
>> 6) & 0x1f;
19104 imm
= (int16_t)ctx
->opcode
;
19107 decode_opc_special(env
, ctx
);
19110 decode_opc_special2_legacy(env
, ctx
);
19113 decode_opc_special3(env
, ctx
);
19116 op1
= MASK_REGIMM(ctx
->opcode
);
        case OPC_BLTZL: /* REGIMM branches */
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_NAL, OPC_BAL */
                gen_compute_branch(ctx, op1, 4, 0, -1, imm << 2, 4);
                generate_exception_end(ctx, EXCP_RI);
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
        case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_trap(ctx, op1, rs, -1, imm);
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            check_insn(ctx, ISA_MIPS32R2);
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
        case OPC_BPOSGE32: /* MIPS DSP branch */
#if defined(TARGET_MIPS64)
            gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2, 4);
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 32);
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 48);
        default: /* Invalid */
            MIPS_INVAL("regimm");
            generate_exception_end(ctx, EXCP_RI);
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
#if defined(TARGET_MIPS64)
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
        case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
#ifndef CONFIG_USER_ONLY
            TCGv t0 = tcg_temp_new();

            op2 = MASK_MFMC0(ctx->opcode);
                check_insn(ctx, ASE_MT);
                gen_helper_dmt(t0);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ASE_MT);
                gen_helper_emt(t0);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ASE_MT);
                gen_helper_dvpe(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ASE_MT);
                gen_helper_evpe(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_helper_dvp(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_helper_evp(t0, cpu_env);
                gen_store_gpr(t0, rt);
                check_insn(ctx, ISA_MIPS32R2);
                save_cpu_state(ctx, 1);
                gen_helper_di(t0, cpu_env);
                gen_store_gpr(t0, rt);
                /* Stop translation as we may have switched
                   the execution mode. */
                ctx->bstate = BS_STOP;
                check_insn(ctx, ISA_MIPS32R2);
                save_cpu_state(ctx, 1);
                gen_helper_ei(t0, cpu_env);
                gen_store_gpr(t0, rt);
                /* Stop translation as we may have switched
                   the execution mode. */
                ctx->bstate = BS_STOP;
            default: /* Invalid */
                MIPS_INVAL("mfmc0");
                generate_exception_end(ctx, EXCP_RI);
#endif /* !CONFIG_USER_ONLY */
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
        generate_exception_end(ctx, EXCP_RI);
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
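        /*
         * MIPS32R6 reuses the pre-R6 ADDI encoding for the compact branches
         * BOVC/BEQZALC/BEQC, so the same major opcode is decoded differently
         * depending on ctx->insn_flags, as the comments above indicate.
         */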
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_SLTI: /* Set on less than with immediate opcode */
        gen_slt_imm(ctx, op, rt, rs, imm);
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
        gen_logic_imm(ctx, op, rt, rs, imm);
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                generate_exception_end(ctx, EXCP_RI);
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                generate_exception_end(ctx, EXCP_RI);
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_SB ... OPC_SH:
        gen_st(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        /* Treat as NOP. */
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */

    /* Floating point (COP1). */
        gen_cop1_ldst(ctx, op, rt, rs, imm);
        op1 = MASK_CP1(ctx->opcode);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
#if defined(TARGET_MIPS64)
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_insn(ctx, ASE_MIPS3D);
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
            int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                case R6_OPC_CMP_AF_S:
                case R6_OPC_CMP_UN_S:
                case R6_OPC_CMP_EQ_S:
                case R6_OPC_CMP_UEQ_S:
                case R6_OPC_CMP_LT_S:
                case R6_OPC_CMP_ULT_S:
                case R6_OPC_CMP_LE_S:
                case R6_OPC_CMP_ULE_S:
                case R6_OPC_CMP_SAF_S:
                case R6_OPC_CMP_SUN_S:
                case R6_OPC_CMP_SEQ_S:
                case R6_OPC_CMP_SEUQ_S:
                case R6_OPC_CMP_SLT_S:
                case R6_OPC_CMP_SULT_S:
                case R6_OPC_CMP_SLE_S:
                case R6_OPC_CMP_SULE_S:
                case R6_OPC_CMP_OR_S:
                case R6_OPC_CMP_UNE_S:
                case R6_OPC_CMP_NE_S:
                case R6_OPC_CMP_SOR_S:
                case R6_OPC_CMP_SUNE_S:
                case R6_OPC_CMP_SNE_S:
                    gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                case R6_OPC_CMP_AF_D:
                case R6_OPC_CMP_UN_D:
                case R6_OPC_CMP_EQ_D:
                case R6_OPC_CMP_UEQ_D:
                case R6_OPC_CMP_LT_D:
                case R6_OPC_CMP_ULT_D:
                case R6_OPC_CMP_LE_D:
                case R6_OPC_CMP_ULE_D:
                case R6_OPC_CMP_SAF_D:
                case R6_OPC_CMP_SUN_D:
                case R6_OPC_CMP_SEQ_D:
                case R6_OPC_CMP_SEUQ_D:
                case R6_OPC_CMP_SLT_D:
                case R6_OPC_CMP_SULT_D:
                case R6_OPC_CMP_SLE_D:
                case R6_OPC_CMP_SULE_D:
                case R6_OPC_CMP_OR_D:
                case R6_OPC_CMP_UNE_D:
                case R6_OPC_CMP_NE_D:
                case R6_OPC_CMP_SOR_D:
                case R6_OPC_CMP_SUNE_D:
                case R6_OPC_CMP_SNE_D:
                    gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
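                    /*
                     * For the R6 CMP.cond.fmt family the comparison condition
                     * lives in the low five bits of the instruction word,
                     * which is why only ctx->opcode & 0x1f is passed to the
                     * helpers above.
                     */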
                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                           rt, rd, sa, (imm >> 8) & 0x7);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            generate_exception_end(ctx, EXCP_RI);
    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BEQZC, OPC_BNEZC */
            gen_compute_compact_branch(ctx, op, rs, 0,
                                       sextract32(ctx->opcode << 2, 0, 23));
            /* OPC_JIC, OPC_JIALC */
            gen_compute_compact_branch(ctx, op, 0, rt, imm);
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                generate_exception_end(ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);

#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            check_mips_64(ctx);
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);
    default: /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
void gen_intermediate_code(CPUMIPSState *env, struct TranslationBlock *tb)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    target_ulong pc_start;
    target_ulong next_page_start;

    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
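    /*
     * A translation block never spans a guest page boundary: next_page_start
     * is the first byte of the following page, and the translation loop
     * below stops once ctx.pc reaches it, which keeps per-page TB
     * invalidation correct.
     */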
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;
    ctx.bstate = BS_NONE;
    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx.cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx.vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx.mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
#else
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
#endif
    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
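    /*
     * MIPS Release 6 architecturally requires support for misaligned memory
     * accesses, so R6 code is translated with MO_UNALN; earlier ISAs keep
     * MO_ALIGN so that unaligned accesses fault as the architecture expects.
     */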
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    while (ctx.bstate == BS_NONE) {
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);

        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing. */
            goto done_generating;

        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {

        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
            generate_exception_end(&ctx, EXCP_RI);

        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* force to generate branch as there is neither delay nor
                   forbidden slot */
            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */
            gen_branch(&ctx, insn_bytes);
        ctx.pc += insn_bytes;

        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {

        if (ctx.pc >= next_page_start) {

        if (tcg_op_buf_full()) {

        if (num_insns >= max_insns)

    if (tb->cflags & CF_LAST_IO) {
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);
        switch (ctx.bstate) {
            gen_goto_tb(&ctx, 0, ctx.pc);
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            tcg_gen_exit_tb(0);
    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp) \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
                    " fd:%13g fs:%13g psu: %13g\n", \
                    (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
                    (double)(fp)->fd, \
                    (double)(fp)->fs[FP_ENDIAN_IDX], \
                    (double)(fp)->fs[!FP_ENDIAN_IDX]); \
        tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
        tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
                    " fd:%13g fs:%13g psu:%13g\n", \
                    tmp.w[FP_ENDIAN_IDX], tmp.d, \
                    (double)tmp.fs[FP_ENDIAN_IDX], \
                    (double)tmp.fs[!FP_ENDIAN_IDX]); \

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        cpu_fprintf(f, "\n");

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
void mips_tcg_init(void)
{
    /* Initialize various static tables. */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
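        /*
         * Each 128-bit MSA register is modelled as two 64-bit TCG globals
         * (wr.d[0] and wr.d[1]); the scalar FPU register fpu_f64[i] aliases
         * the low half, msa_wr_d[i * 2].
         */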
    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),

#include "translate_init.c"
MIPSCPU *cpu_mips_init(const char *cpu_model)
{
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env->cpu_model = def;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
    fpu_init(env, def);
    mvp_init(env, def);

    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);
bool cpu_supports_cps_smp(const char *cpu_model)
{
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);

    return (def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
}
void cpu_state_reset(CPUMIPSState *env)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers. */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
        env->CP0_ErrorEPC = env->active_tc.PC;
    env->active_tc.PC = (int32_t)0xBFC00000;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (kvm_enabled()) {
        env->CP0_EBase |= 0x40000000;
        env->CP0_EBase |= 0x80000000;
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    for (i = 0; i < 7; i++) {
        env->CP0_WatchLo[i] = 0;
        env->CP0_WatchHi[i] = 0x80000000;
    env->CP0_WatchLo[7] = 0;
    env->CP0_WatchHi[7] = 0;
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        env->active_tc.CP0_TCHalt = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);

    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);

    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {

    compute_hflags(env);
    restore_rounding_mode(env);
    restore_flush_mode(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
        env->btarget = data[2];
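    /*
     * data[] holds the values recorded per guest instruction by
     * tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget)
     * in gen_intermediate_code(): data[0] is the PC, data[1] the pending
     * branch flags and data[2] the branch target restored here.
     */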