/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "sysemu/kvm.h"
#include "exec/semihost.h"

#include "trace-tcg.h"

#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)       (op & (0x3F << 26))
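
/*
 * Worked example (illustrative note, not part of the original source):
 * the word 0x24420001 encodes "addiu v0, v0, 1".  Its top six bits are
 * 0b001001 (0x09), so MASK_OP_MAJOR(0x24420001) yields (0x09 << 26),
 * i.e. OPC_ADDIU below, which is the value the decoder switches on.
 */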

enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_J        = (0x02 << 26),
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};

/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
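
/*
 * Illustrative note (not in the original source): within OPC_PCREL,
 * ADDIUPC/LWPC/LWUPC are told apart by instruction bits 20..19
 * (MASK_OPC_PCREL_TOP2BITS), while AUIPC/ALUIPC occupy the 0x1e/0x1f
 * encodings of the full rt field, bits 20..16, and therefore need
 * MASK_OPC_PCREL_TOP5BITS.
 */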

enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC   = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC   = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC  = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};

/* MIPS special opcodes */
#define MASK_SPECIAL(op)            MASK_OP_MAJOR(op) | (op & 0x3F)
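
/*
 * Worked example (illustrative note, not part of the original source):
 * 0x00a62021 encodes "addu a0, a1, a2" - major opcode 0 (SPECIAL) with
 * function field 0x21 - so MASK_SPECIAL(0x00a62021) equals
 * OPC_SPECIAL | 0x21, i.e. OPC_ADDU below.
 */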

enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3   */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,

    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */

    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ   = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ   = 0x37 | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};

/* R6 Multiply and Divide instructions have the same Opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op)          (MASK_SPECIAL(op) | (op & (0x7ff)))

enum {
    R6_OPC_MUL   = OPC_MULT  | (2 << 6),
    R6_OPC_MUH   = OPC_MULT  | (3 << 6),
    R6_OPC_MULU  = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU  = OPC_MULTU | (3 << 6),
    R6_OPC_DIV   = OPC_DIV   | (2 << 6),
    R6_OPC_MOD   = OPC_DIV   | (3 << 6),
    R6_OPC_DIVU  = OPC_DIVU  | (2 << 6),
    R6_OPC_MODU  = OPC_DIVU  | (3 << 6),

    R6_OPC_DMUL   = OPC_DMULT  | (2 << 6),
    R6_OPC_DMUH   = OPC_DMULT  | (3 << 6),
    R6_OPC_DMULU  = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU  = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV   = OPC_DDIV   | (2 << 6),
    R6_OPC_DMOD   = OPC_DDIV   | (3 << 6),
    R6_OPC_DDIVU  = OPC_DDIVU  | (2 << 6),
    R6_OPC_DMODU  = OPC_DDIVU  | (3 << 6),

    R6_OPC_CLZ    = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO    = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ   = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO   = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP  = 0x0e | OPC_SPECIAL,

    OPC_LSA       = 0x05 | OPC_SPECIAL,
    OPC_DLSA      = 0x15 | OPC_SPECIAL,
};

/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)         MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};

/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)             MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE   = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI     = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI     = (0x1e << 16) | OPC_REGIMM,
};

/* Special2 opcodes */
#define MASK_SPECIAL2(op)           MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,

    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
};

/* Special3 opcodes */
#define MASK_SPECIAL3(op)           MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP         = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP    = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP    = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP  = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP  = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E.  */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3,  */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP    = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP    = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP.  */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3,  */
    OPC_DPA_W_PH_DSP   = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP  = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP       = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP      = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP     = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP    = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP     = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP    = 0x3C | OPC_SPECIAL3,

    R6_OPC_PREF        = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE       = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL          = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC          = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD         = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD         = 0x27 | OPC_SPECIAL3,
};

#define MASK_BSHFL(op)              MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH      = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB       = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH       = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN     = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP   = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};

#define MASK_DBSHFL(op)             MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH       = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD       = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN     = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP   = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};

/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};

#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX  = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX  = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX  = (0x08 << 6) | OPC_LX_DSP,
};

#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH        = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH      = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W       = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB        = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB      = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH        = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH      = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH        = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH      = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W       = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB        = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB      = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH        = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH      = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC          = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC          = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB         = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB     = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH     = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL  = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR  = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH      = (0x1E << 6) | OPC_ADDU_QB_DSP,
};

#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB   = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH   = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W    = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W  = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB   = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH   = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W    = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W  = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH     = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH   = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W   = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W  = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};

#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB       = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH       = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W        = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL    = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR    = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL  = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR  = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL   = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR   = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA  = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA  = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV          = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB         = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB        = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH         = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH        = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};

#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH      = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH     = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W   = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W      = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W   = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH  = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB       = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB       = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB       = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB      = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB      = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB      = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB     = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB     = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB     = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH        = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH        = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH        = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB          = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH          = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH        = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};

#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB    = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB   = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH    = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH   = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH  = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W   = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W  = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB    = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB   = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH    = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH   = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB    = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB  = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB   = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH    = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH   = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH  = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W   = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W  = (0x17 << 6) | OPC_SHLL_QB_DSP,
};

#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL    = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR    = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL    = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR    = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH      = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH     = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH   = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH  = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH      = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH     = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH   = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH  = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W   = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W   = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL   = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR   = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL  = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR  = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH    = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};

#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};

#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND  = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN  = (0x10 << 6) | OPC_APPEND_DSP,
};

#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W     = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W   = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W  = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H   = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H  = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W    = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W  = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP       = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV      = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP     = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV    = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO      = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV     = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP     = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP      = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP      = (0x12 << 6) | OPC_EXTR_W_DSP,
};

#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL    = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR    = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL   = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR   = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA  = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA  = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL  = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR  = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL   = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR   = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA  = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA  = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB       = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW       = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH       = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB         = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW         = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH         = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB        = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW        = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH        = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};

#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH     = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB     = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW        = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW      = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH        = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH      = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB        = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB      = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH        = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH      = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB       = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB     = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW        = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW      = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH        = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH      = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB        = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB      = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH        = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH      = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB       = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB     = (0x1A << 6) | OPC_ADDU_OB_DSP,
};

#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW         = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW         = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW         = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH         = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH         = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH         = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB      = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB      = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB      = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB       = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB       = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB       = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB        = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB        = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB        = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW         = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB           = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW           = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH           = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH       = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW   = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH      = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L       = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW      = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW   = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH   = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};

#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND  = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN  = (0x10 << 6) | OPC_DAPPEND_DSP,
};

#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP     = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO      = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP       = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP     = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV    = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV      = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L     = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L   = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L  = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W     = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W   = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W  = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H   = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L    = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L  = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H  = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W    = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W  = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV     = (0x1B << 6) | OPC_DEXTR_W_DSP,
};

#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};

#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD         = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU        = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB         = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU        = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH      = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH   = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW  = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL    = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR    = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH      = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH   = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW  = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL    = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR    = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL   = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR   = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL  = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR  = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL  = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR  = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};

#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW    = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW  = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB   = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW   = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH   = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW    = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW  = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB   = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW   = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH   = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB   = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH   = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB    = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH    = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH  = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB    = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB  = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH    = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH  = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB    = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH    = (0x19 << 6) | OPC_SHLL_OB_DSP,
};

/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)                MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0    = (0x02 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0    = (0x06 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};

#define MASK_MFMC0(op)              MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT      = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT      = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE     = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE     = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI       = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI       = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_DVP      = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
    OPC_EVP      = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
};

/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)                 MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR     = 0x01 | OPC_C0,
    OPC_TLBWI    = 0x02 | OPC_C0,
    OPC_TLBINV   = 0x03 | OPC_C0,
    OPC_TLBINVF  = 0x04 | OPC_C0,
    OPC_TLBWR    = 0x06 | OPC_C0,
    OPC_TLBP     = 0x08 | OPC_C0,
    OPC_RFE      = 0x10 | OPC_C0,
    OPC_ERET     = 0x18 | OPC_C0,
    OPC_DERET    = 0x1F | OPC_C0,
    OPC_WAIT     = 0x20 | OPC_C0,
};

/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)                MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S = 16,          /* single fp */
    FMT_D = 17,          /* double fp */
    FMT_E = 18,          /* extended fp */
    FMT_Q = 19,          /* quad fp */
    FMT_W = 20,          /* 32-bit fixed */
    FMT_L = 21,          /* 64-bit fixed */
    FMT_PS = 22,         /* paired single fp */
    /* 23 - 31 are reserved */
};

enum {
    OPC_MFC1     = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1    = (0x01 << 21) | OPC_CP1,
    OPC_CFC1     = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1    = (0x03 << 21) | OPC_CP1,
    OPC_MTC1     = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1    = (0x05 << 21) | OPC_CP1,
    OPC_CTC1     = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1    = (0x07 << 21) | OPC_CP1,
    OPC_BC1      = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2  = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4  = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V     = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V    = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT    = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT    = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT    = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT    = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT    = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT    = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT   = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ   = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ   = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B     = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H     = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W     = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D     = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B    = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H    = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W    = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D    = (0x1F << 21) | OPC_CP1,
};

#define MASK_CP1_FUNC(op)           MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)                MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F     = (0x00 << 16) | OPC_BC1,
    OPC_BC1T     = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL    = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL    = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};

#define MASK_CP2(op)                MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2    = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2   = (0x01 << 21) | OPC_CP2,
    OPC_CFC2    = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2   = (0x03 << 21) | OPC_CP2,
    OPC_MTC2    = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2   = (0x05 << 21) | OPC_CP2,
    OPC_CTC2    = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2   = (0x07 << 21) | OPC_CP2,
    OPC_BC2     = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ  = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ  = (0x0D << 21) | OPC_CP2,
};

#define MASK_LMI(op)  (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH    = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH   = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH     = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW     = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB    = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB   = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB     = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD     = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH    = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH   = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH     = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW     = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB    = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB   = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB     = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD     = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH    = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH  = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB  = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB  = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2   = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2   = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2   = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN     = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0  = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1  = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2  = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3  = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH     = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB     = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH    = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH    = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB    = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB    = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW   = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW   = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH   = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH   = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB   = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB   = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW     = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH     = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH    = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH    = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW    = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH   = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW     = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH     = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW     = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH     = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2  = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2    = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2   = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2  = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2  = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2   = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2  = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB   = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2   = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2  = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2  = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2   = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2   = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2  = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH    = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW   = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2  = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2   = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2   = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2  = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2   = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2  = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD     = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB  = (29 << 21) | (0x0F) | OPC_CP2,
};

#define MASK_CP3(op)                MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};

#define MASK_MSA_MINOR(op)          (MASK_OP_MAJOR(op) | (op & 0x3F))

enum {
    OPC_MSA_I8_00   = 0x00 | OPC_MSA,
    OPC_MSA_I8_01   = 0x01 | OPC_MSA,
    OPC_MSA_I8_02   = 0x02 | OPC_MSA,
    OPC_MSA_I5_06   = 0x06 | OPC_MSA,
    OPC_MSA_I5_07   = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09  = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A  = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D   = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E   = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F   = 0x0F | OPC_MSA,
    OPC_MSA_3R_10   = 0x10 | OPC_MSA,
    OPC_MSA_3R_11   = 0x11 | OPC_MSA,
    OPC_MSA_3R_12   = 0x12 | OPC_MSA,
    OPC_MSA_3R_13   = 0x13 | OPC_MSA,
    OPC_MSA_3R_14   = 0x14 | OPC_MSA,
    OPC_MSA_3R_15   = 0x15 | OPC_MSA,
    OPC_MSA_ELM     = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A  = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B  = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C  = 0x1C | OPC_MSA,
    OPC_MSA_VEC     = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B        = (0x20) | OPC_MSA,
    OPC_LD_H        = (0x21) | OPC_MSA,
    OPC_LD_W        = (0x22) | OPC_MSA,
    OPC_LD_D        = (0x23) | OPC_MSA,
    OPC_ST_B        = (0x24) | OPC_MSA,
    OPC_ST_H        = (0x25) | OPC_MSA,
    OPC_ST_W        = (0x26) | OPC_MSA,
    OPC_ST_D        = (0x27) | OPC_MSA,
};

enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df    = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df     = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df    = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df   = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df   = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df   = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df   = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df   = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df   = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df   = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df   = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df      = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B      = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B     = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B       = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B       = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B      = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H       = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B      = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B     = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W       = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B      = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V       = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V        = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V       = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V       = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V      = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V       = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V      = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R      = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF     = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df     = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df     = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df     = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df     = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df   = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df    = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df   = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df     = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df    = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df    = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df   = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df   = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df     = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df     = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df  = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df  = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df  = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df  = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df      = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df     = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df      = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df    = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df   = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df     = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df   = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df      = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df     = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df      = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df     = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df   = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df   = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df    = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df   = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df    = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df     = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df      = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df    = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df    = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df   = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df    = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df  = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df    = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df     = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df     = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df    = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df    = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df   = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df  = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df    = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df     = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df    = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df    = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df    = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df   = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df    = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df  = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df     = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df   = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df     = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df    = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df    = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df    = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df   = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df    = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df  = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df     = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df   = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df    = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df    = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df   = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df    = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df    = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df   = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df    = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df    = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df   = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df    = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df    = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df   = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df     = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA      = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df   = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA      = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df   = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V      = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df   = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df   = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df    = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df     = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df     = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df     = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df     = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df     = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df     = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df     = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df    = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df    = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df     = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df     = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df     = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df    = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df    = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df    = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df    = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df   = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df     = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df   = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df    = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df    = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df     = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df    = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df     = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df     = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df     = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df      = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df    = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df    = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df     = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df     = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df     = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df   = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df    = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df   = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df  = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df     = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df     = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df  = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df    = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df   = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df     = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df    = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df     = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df    = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df     = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df    = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df    = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df    = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df    = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df    = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df   = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df   = (0x7 << 23) | OPC_MSA_BIT_09,
};

/* global register indices */
static TCGv_env cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];

#include "exec/gen-icount.h"

#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
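
/*
 * Usage sketch (hypothetical helper name, not a call taken from this
 * file): something like
 *     gen_helper_0e1i(foo, t0, sel);
 * wraps the immediate 'sel' in a temporary TCGv_i32, calls
 * gen_helper_foo(cpu_env, t0, tmp) and then frees the temporary -
 * "0 return values, 1 TCG argument, 1 immediate", as the name encodes.
 */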

typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    int insn_flags;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    int mem_idx;
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

enum {
    BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
                      * exception condition */
    BS_STOP     = 1, /* We want to stop translation for any reason */
    BS_BRANCH   = 2, /* We reached a branch condition */
    BS_EXCP     = 3, /* We reached an exception condition */
};

static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
1491 #define LOG_DISAS(...) \
1493 if (MIPS_DEBUG_DISAS) { \
1494 qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__); \
1498 #define MIPS_INVAL(op) \
1500 if (MIPS_DEBUG_DISAS) { \
1501 qemu_log_mask(CPU_LOG_TB_IN_ASM, \
1502 TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
1503 ctx->pc, ctx->opcode, op, ctx->opcode >> 26, \
1504 ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F)); \
1508 /* General purpose registers moves. */
1509 static inline void gen_load_gpr (TCGv t
, int reg
)
1512 tcg_gen_movi_tl(t
, 0);
1514 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1517 static inline void gen_store_gpr (TCGv t
, int reg
)
1520 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
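/*
 * Illustrative sketch (not from the original source; the helper name below is
 * hypothetical): translation routines normally go through a temporary so the
 * $zero conventions are handled in exactly one place.
 */
static inline void gen_example_move(int rd, int rs)
{
    TCGv t0 = tcg_temp_new();   /* translation-time temporary */
    gen_load_gpr(t0, rs);       /* reads constant 0 when rs is $zero */
    gen_store_gpr(t0, rd);      /* silently discarded when rd is $zero */
    tcg_temp_free(t0);
}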
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
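/*
 * Worked example (illustrative, not part of the original code): with
 * sizeof(target_ulong) == 8 and SRSCtl.PSS == 2, the generated code above
 * forms cpu_env + 2 * 32 * 8 = cpu_env + 512 as the base of the previous
 * shadow set, and shadow GPR 5 of that set is then accessed at byte offset
 * 512 + 5 * 8 = 552 from cpu_env.
 */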
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}

static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}

static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
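/*
 * Illustrative note (not from the original source): when MIPS_HFLAG_F64 is
 * clear (Status.FR == 0), a 64-bit value stored to $f2 by gen_store_fpr64()
 * is split across the even/odd pair: bits 31..0 land in the low half of
 * fpu_f64[2] and bits 63..32 in the low half of fpu_f64[3].
 * gen_load_fpr64() reassembles the pair with tcg_gen_concat32_i64().
 */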
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}

/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}

/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * check_cp1_registers(ctx, freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}
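/*
 * Illustrative usage (operand names are hypothetical): a double-precision
 * three-operand instruction can validate all of its registers in one call,
 * since only the low bit of each register number matters:
 *
 *     check_cp1_registers(ctx, fd | fs | ft);
 */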
/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit.
 */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has the corresponding flag set, which indicates that the
   instruction has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the 64-bit paired-single (PS) floating point
   data type. */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}

#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use. */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1881 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1882 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1883 int ft, int fs, int cc) \
1885 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1886 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1895 check_cp1_registers(ctx, fs | ft); \
1903 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1904 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1906 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1907 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1908 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1909 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1910 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1911 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1912 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1913 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1914 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1915 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1916 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1917 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1918 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1919 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1920 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1921 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1924 tcg_temp_free_i##bits (fp0); \
1925 tcg_temp_free_i##bits (fp1); \
FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
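/*
 * Illustrative note (not part of the original code): each FOP_CONDS()
 * instantiation above expands into one dispatch function; for example
 * FOP_CONDS(abs, 1, d, FMT_D, 64) defines
 *
 *     static inline void gen_cmpabs_d(DisasContext *ctx, int n,
 *                                     int ft, int fs, int cc);
 *
 * which loads fp0/fp1 as 64-bit values and invokes the matching
 * gen_helper_0e2i(cmpabs_d_<cond>, fp0, fp1, cc) for n = 0..15.
 */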
1936 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1937 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1938 int ft, int fs, int fd) \
1940 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1941 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1942 if (ifmt == FMT_D) { \
1943 check_cp1_registers(ctx, fs | ft | fd); \
1945 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1946 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1949 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1952 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1955 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1958 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1961 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1964 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1967 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1970 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1973 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1976 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1979 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1982 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1985 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1988 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1991 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1994 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1997 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
2000 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
2003 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2006 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2009 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2012 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2018 tcg_temp_free_i ## bits (fp0); \
2019 tcg_temp_free_i ## bits (fp1); \
FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))
2025 #undef gen_ldcmp_fpr32
2026 #undef gen_ldcmp_fpr64
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx);                        \
}
#endif
OP_LD_ATOMIC(ll, ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld, ld64);
#endif
#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    TCGLabel *l1 = gen_new_label();                                          \
    TCGLabel *l2 = gen_new_label();                                          \
                                                                             \
    tcg_gen_andi_tl(t0, arg2, almask);                                       \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                              \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));      \
    generate_exception(ctx, EXCP_AdES);                                      \
    gen_set_label(l1);                                                       \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));              \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                            \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                        \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));               \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));          \
    generate_exception_end(ctx, EXCP_SC);                                    \
    gen_set_label(l2);                                                       \
    tcg_gen_movi_tl(t0, 0);                                                  \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx);                     \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#endif
OP_ST_ATOMIC(sc, st32, ld32s, 0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd, st64, ld64, 0x7);
#endif
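/*
 * Illustrative note (not from the original source): in the CONFIG_USER_ONLY
 * variant above, op_ld_ll() records the address in lladdr and the loaded
 * value in llval; op_st_sc() checks alignment, compares the store address
 * against lladdr and, on a match, raises EXCP_SC so the store and the llval
 * comparison are completed outside generated code.  On a mismatch it simply
 * writes 0 to rt, i.e. the store-conditional fails.
 */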
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
2121 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2122 int rt
, int base
, int16_t offset
)
2126 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
        /* Loongson CPUs use a load to the zero register as a prefetch.
           We emulate it as a NOP. On other CPUs we must perform the
           actual memory access. */
2133 t0
= tcg_temp_new();
2134 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2137 #if defined(TARGET_MIPS64)
2139 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2140 ctx
->default_tcg_memop_mask
);
2141 gen_store_gpr(t0
, rt
);
2144 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2145 ctx
->default_tcg_memop_mask
);
2146 gen_store_gpr(t0
, rt
);
2150 op_ld_lld(t0
, t0
, ctx
);
2151 gen_store_gpr(t0
, rt
);
2154 t1
= tcg_temp_new();
2155 /* Do a byte access to possibly trigger a page
2156 fault with the unaligned address. */
2157 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2158 tcg_gen_andi_tl(t1
, t0
, 7);
2159 #ifndef TARGET_WORDS_BIGENDIAN
2160 tcg_gen_xori_tl(t1
, t1
, 7);
2162 tcg_gen_shli_tl(t1
, t1
, 3);
2163 tcg_gen_andi_tl(t0
, t0
, ~7);
2164 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2165 tcg_gen_shl_tl(t0
, t0
, t1
);
2166 t2
= tcg_const_tl(-1);
2167 tcg_gen_shl_tl(t2
, t2
, t1
);
2168 gen_load_gpr(t1
, rt
);
2169 tcg_gen_andc_tl(t1
, t1
, t2
);
2171 tcg_gen_or_tl(t0
, t0
, t1
);
2173 gen_store_gpr(t0
, rt
);
2176 t1
= tcg_temp_new();
2177 /* Do a byte access to possibly trigger a page
2178 fault with the unaligned address. */
2179 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2180 tcg_gen_andi_tl(t1
, t0
, 7);
2181 #ifdef TARGET_WORDS_BIGENDIAN
2182 tcg_gen_xori_tl(t1
, t1
, 7);
2184 tcg_gen_shli_tl(t1
, t1
, 3);
2185 tcg_gen_andi_tl(t0
, t0
, ~7);
2186 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2187 tcg_gen_shr_tl(t0
, t0
, t1
);
2188 tcg_gen_xori_tl(t1
, t1
, 63);
2189 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2190 tcg_gen_shl_tl(t2
, t2
, t1
);
2191 gen_load_gpr(t1
, rt
);
2192 tcg_gen_and_tl(t1
, t1
, t2
);
2194 tcg_gen_or_tl(t0
, t0
, t1
);
2196 gen_store_gpr(t0
, rt
);
2199 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2200 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2202 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2203 gen_store_gpr(t0
, rt
);
2207 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2208 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2210 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2211 gen_store_gpr(t0
, rt
);
2214 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2215 ctx
->default_tcg_memop_mask
);
2216 gen_store_gpr(t0
, rt
);
2219 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2220 ctx
->default_tcg_memop_mask
);
2221 gen_store_gpr(t0
, rt
);
2224 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2225 ctx
->default_tcg_memop_mask
);
2226 gen_store_gpr(t0
, rt
);
2229 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2230 gen_store_gpr(t0
, rt
);
2233 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2234 gen_store_gpr(t0
, rt
);
2237 t1
= tcg_temp_new();
2238 /* Do a byte access to possibly trigger a page
2239 fault with the unaligned address. */
2240 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2241 tcg_gen_andi_tl(t1
, t0
, 3);
2242 #ifndef TARGET_WORDS_BIGENDIAN
2243 tcg_gen_xori_tl(t1
, t1
, 3);
2245 tcg_gen_shli_tl(t1
, t1
, 3);
2246 tcg_gen_andi_tl(t0
, t0
, ~3);
2247 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2248 tcg_gen_shl_tl(t0
, t0
, t1
);
2249 t2
= tcg_const_tl(-1);
2250 tcg_gen_shl_tl(t2
, t2
, t1
);
2251 gen_load_gpr(t1
, rt
);
2252 tcg_gen_andc_tl(t1
, t1
, t2
);
2254 tcg_gen_or_tl(t0
, t0
, t1
);
2256 tcg_gen_ext32s_tl(t0
, t0
);
2257 gen_store_gpr(t0
, rt
);
2260 t1
= tcg_temp_new();
2261 /* Do a byte access to possibly trigger a page
2262 fault with the unaligned address. */
2263 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2264 tcg_gen_andi_tl(t1
, t0
, 3);
2265 #ifdef TARGET_WORDS_BIGENDIAN
2266 tcg_gen_xori_tl(t1
, t1
, 3);
2268 tcg_gen_shli_tl(t1
, t1
, 3);
2269 tcg_gen_andi_tl(t0
, t0
, ~3);
2270 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2271 tcg_gen_shr_tl(t0
, t0
, t1
);
2272 tcg_gen_xori_tl(t1
, t1
, 31);
2273 t2
= tcg_const_tl(0xfffffffeull
);
2274 tcg_gen_shl_tl(t2
, t2
, t1
);
2275 gen_load_gpr(t1
, rt
);
2276 tcg_gen_and_tl(t1
, t1
, t2
);
2278 tcg_gen_or_tl(t0
, t0
, t1
);
2280 tcg_gen_ext32s_tl(t0
, t0
);
2281 gen_store_gpr(t0
, rt
);
2285 op_ld_ll(t0
, t0
, ctx
);
2286 gen_store_gpr(t0
, rt
);
2293 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2294 int base
, int16_t offset
)
2296 TCGv t0
= tcg_temp_new();
2297 TCGv t1
= tcg_temp_new();
2299 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2300 gen_load_gpr(t1
, rt
);
2302 #if defined(TARGET_MIPS64)
2304 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2305 ctx
->default_tcg_memop_mask
);
2308 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2311 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2315 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2316 ctx
->default_tcg_memop_mask
);
2319 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2320 ctx
->default_tcg_memop_mask
);
2323 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2326 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2329 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2337 /* Store conditional */
2338 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2339 int base
, int16_t offset
)
2343 #ifdef CONFIG_USER_ONLY
2344 t0
= tcg_temp_local_new();
2345 t1
= tcg_temp_local_new();
2347 t0
= tcg_temp_new();
2348 t1
= tcg_temp_new();
2350 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2351 gen_load_gpr(t1
, rt
);
2353 #if defined(TARGET_MIPS64)
2356 op_st_scd(t1
, t0
, rt
, ctx
);
2361 op_st_sc(t1
, t0
, rt
, ctx
);
2368 /* Load and store */
2369 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2370 int base
, int16_t offset
)
2372 TCGv t0
= tcg_temp_new();
2374 gen_base_offset_addr(ctx
, t0
, base
, offset
);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
2380 TCGv_i32 fp0
= tcg_temp_new_i32();
2381 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2382 ctx
->default_tcg_memop_mask
);
2383 gen_store_fpr32(ctx
, fp0
, ft
);
2384 tcg_temp_free_i32(fp0
);
2389 TCGv_i32 fp0
= tcg_temp_new_i32();
2390 gen_load_fpr32(ctx
, fp0
, ft
);
2391 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2392 ctx
->default_tcg_memop_mask
);
2393 tcg_temp_free_i32(fp0
);
2398 TCGv_i64 fp0
= tcg_temp_new_i64();
2399 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2400 ctx
->default_tcg_memop_mask
);
2401 gen_store_fpr64(ctx
, fp0
, ft
);
2402 tcg_temp_free_i64(fp0
);
2407 TCGv_i64 fp0
= tcg_temp_new_i64();
2408 gen_load_fpr64(ctx
, fp0
, ft
);
2409 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2410 ctx
->default_tcg_memop_mask
);
2411 tcg_temp_free_i64(fp0
);
2415 MIPS_INVAL("flt_ldst");
2416 generate_exception_end(ctx
, EXCP_RI
);
2423 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2424 int rs
, int16_t imm
)
2426 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2427 check_cp1_enabled(ctx
);
2431 check_insn(ctx
, ISA_MIPS2
);
2434 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2437 generate_exception_err(ctx
, EXCP_CpU
, 1);
2441 /* Arithmetic with immediate operand */
2442 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2443 int rt
, int rs
, int16_t imm
)
2445 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2447 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2448 /* If no destination, treat it as a NOP.
2449 For addi, we must generate the overflow exception when needed. */
2455 TCGv t0
= tcg_temp_local_new();
2456 TCGv t1
= tcg_temp_new();
2457 TCGv t2
= tcg_temp_new();
2458 TCGLabel
*l1
= gen_new_label();
2460 gen_load_gpr(t1
, rs
);
2461 tcg_gen_addi_tl(t0
, t1
, uimm
);
2462 tcg_gen_ext32s_tl(t0
, t0
);
2464 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2465 tcg_gen_xori_tl(t2
, t0
, uimm
);
2466 tcg_gen_and_tl(t1
, t1
, t2
);
2468 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2470 /* operands of same sign, result different sign */
2471 generate_exception(ctx
, EXCP_OVERFLOW
);
2473 tcg_gen_ext32s_tl(t0
, t0
);
2474 gen_store_gpr(t0
, rt
);
2480 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2481 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2483 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2486 #if defined(TARGET_MIPS64)
2489 TCGv t0
= tcg_temp_local_new();
2490 TCGv t1
= tcg_temp_new();
2491 TCGv t2
= tcg_temp_new();
2492 TCGLabel
*l1
= gen_new_label();
2494 gen_load_gpr(t1
, rs
);
2495 tcg_gen_addi_tl(t0
, t1
, uimm
);
2497 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2498 tcg_gen_xori_tl(t2
, t0
, uimm
);
2499 tcg_gen_and_tl(t1
, t1
, t2
);
2501 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2503 /* operands of same sign, result different sign */
2504 generate_exception(ctx
, EXCP_OVERFLOW
);
2506 gen_store_gpr(t0
, rt
);
2512 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2514 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
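/*
 * Illustrative sketch (not part of the translator): the XOR/AND sequence used
 * for ADDI/DADDI above is the classic branch-free signed-overflow test.  In
 * plain C it reads:
 */
static inline int example_add32_overflows(int32_t rs, int32_t imm)
{
    int32_t sum = (int32_t)((uint32_t)rs + (uint32_t)imm);
    /* ~(rs ^ imm) has the sign bit set when the operands agree in sign;
       (sum ^ imm) has it set when the result disagrees with them.  Both
       together means signed overflow. */
    return (~(rs ^ imm) & (sum ^ imm)) < 0;
}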
2521 /* Logic with immediate operand */
2522 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2523 int rt
, int rs
, int16_t imm
)
2528 /* If no destination, treat it as a NOP. */
2531 uimm
= (uint16_t)imm
;
2534 if (likely(rs
!= 0))
2535 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2537 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2541 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2543 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2546 if (likely(rs
!= 0))
2547 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2549 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2552 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2554 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2555 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2557 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2566 /* Set on less than with immediate operand */
2567 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2568 int rt
, int rs
, int16_t imm
)
2570 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2574 /* If no destination, treat it as a NOP. */
2577 t0
= tcg_temp_new();
2578 gen_load_gpr(t0
, rs
);
2581 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2584 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2590 /* Shifts with immediate operand */
2591 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2592 int rt
, int rs
, int16_t imm
)
2594 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2598 /* If no destination, treat it as a NOP. */
2602 t0
= tcg_temp_new();
2603 gen_load_gpr(t0
, rs
);
2606 tcg_gen_shli_tl(t0
, t0
, uimm
);
2607 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2610 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2614 tcg_gen_ext32u_tl(t0
, t0
);
2615 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2617 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2622 TCGv_i32 t1
= tcg_temp_new_i32();
2624 tcg_gen_trunc_tl_i32(t1
, t0
);
2625 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2626 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2627 tcg_temp_free_i32(t1
);
2629 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2632 #if defined(TARGET_MIPS64)
2634 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2637 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2640 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2644 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2646 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2650 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2653 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2656 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2659 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2667 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2668 int rd
, int rs
, int rt
)
2670 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2671 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2672 /* If no destination, treat it as a NOP.
2673 For add & sub, we must generate the overflow exception when needed. */
2680 TCGv t0
= tcg_temp_local_new();
2681 TCGv t1
= tcg_temp_new();
2682 TCGv t2
= tcg_temp_new();
2683 TCGLabel
*l1
= gen_new_label();
2685 gen_load_gpr(t1
, rs
);
2686 gen_load_gpr(t2
, rt
);
2687 tcg_gen_add_tl(t0
, t1
, t2
);
2688 tcg_gen_ext32s_tl(t0
, t0
);
2689 tcg_gen_xor_tl(t1
, t1
, t2
);
2690 tcg_gen_xor_tl(t2
, t0
, t2
);
2691 tcg_gen_andc_tl(t1
, t2
, t1
);
2693 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2695 /* operands of same sign, result different sign */
2696 generate_exception(ctx
, EXCP_OVERFLOW
);
2698 gen_store_gpr(t0
, rd
);
2703 if (rs
!= 0 && rt
!= 0) {
2704 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2705 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2706 } else if (rs
== 0 && rt
!= 0) {
2707 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2708 } else if (rs
!= 0 && rt
== 0) {
2709 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2711 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2716 TCGv t0
= tcg_temp_local_new();
2717 TCGv t1
= tcg_temp_new();
2718 TCGv t2
= tcg_temp_new();
2719 TCGLabel
*l1
= gen_new_label();
2721 gen_load_gpr(t1
, rs
);
2722 gen_load_gpr(t2
, rt
);
2723 tcg_gen_sub_tl(t0
, t1
, t2
);
2724 tcg_gen_ext32s_tl(t0
, t0
);
2725 tcg_gen_xor_tl(t2
, t1
, t2
);
2726 tcg_gen_xor_tl(t1
, t0
, t1
);
2727 tcg_gen_and_tl(t1
, t1
, t2
);
2729 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2731 /* operands of different sign, first operand and result different sign */
2732 generate_exception(ctx
, EXCP_OVERFLOW
);
2734 gen_store_gpr(t0
, rd
);
2739 if (rs
!= 0 && rt
!= 0) {
2740 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2741 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2742 } else if (rs
== 0 && rt
!= 0) {
2743 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2744 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2745 } else if (rs
!= 0 && rt
== 0) {
2746 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2748 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2751 #if defined(TARGET_MIPS64)
2754 TCGv t0
= tcg_temp_local_new();
2755 TCGv t1
= tcg_temp_new();
2756 TCGv t2
= tcg_temp_new();
2757 TCGLabel
*l1
= gen_new_label();
2759 gen_load_gpr(t1
, rs
);
2760 gen_load_gpr(t2
, rt
);
2761 tcg_gen_add_tl(t0
, t1
, t2
);
2762 tcg_gen_xor_tl(t1
, t1
, t2
);
2763 tcg_gen_xor_tl(t2
, t0
, t2
);
2764 tcg_gen_andc_tl(t1
, t2
, t1
);
2766 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2768 /* operands of same sign, result different sign */
2769 generate_exception(ctx
, EXCP_OVERFLOW
);
2771 gen_store_gpr(t0
, rd
);
2776 if (rs
!= 0 && rt
!= 0) {
2777 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2778 } else if (rs
== 0 && rt
!= 0) {
2779 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2780 } else if (rs
!= 0 && rt
== 0) {
2781 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2783 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2788 TCGv t0
= tcg_temp_local_new();
2789 TCGv t1
= tcg_temp_new();
2790 TCGv t2
= tcg_temp_new();
2791 TCGLabel
*l1
= gen_new_label();
2793 gen_load_gpr(t1
, rs
);
2794 gen_load_gpr(t2
, rt
);
2795 tcg_gen_sub_tl(t0
, t1
, t2
);
2796 tcg_gen_xor_tl(t2
, t1
, t2
);
2797 tcg_gen_xor_tl(t1
, t0
, t1
);
2798 tcg_gen_and_tl(t1
, t1
, t2
);
2800 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2802 /* operands of different sign, first operand and result different sign */
2803 generate_exception(ctx
, EXCP_OVERFLOW
);
2805 gen_store_gpr(t0
, rd
);
2810 if (rs
!= 0 && rt
!= 0) {
2811 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2812 } else if (rs
== 0 && rt
!= 0) {
2813 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2814 } else if (rs
!= 0 && rt
== 0) {
2815 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2817 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2822 if (likely(rs
!= 0 && rt
!= 0)) {
2823 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2824 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2826 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2832 /* Conditional move */
2833 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2834 int rd
, int rs
, int rt
)
2839 /* If no destination, treat it as a NOP. */
2843 t0
= tcg_temp_new();
2844 gen_load_gpr(t0
, rt
);
2845 t1
= tcg_const_tl(0);
2846 t2
= tcg_temp_new();
2847 gen_load_gpr(t2
, rs
);
2850 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2853 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2856 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2859 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
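/*
 * Illustrative note (not part of the original code): with t0 holding rt,
 * t1 holding constant 0 and t2 holding rs, MOVN/MOVZ keep the old rd value
 * on a failed condition, so they pass cpu_gpr[rd] itself as the "else"
 * operand of tcg_gen_movcond_tl(); the R6 SELNEZ/SELEQZ variants pass t1
 * instead, so a failed condition writes 0 to rd.
 */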
2868 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2869 int rd
, int rs
, int rt
)
2872 /* If no destination, treat it as a NOP. */
2878 if (likely(rs
!= 0 && rt
!= 0)) {
2879 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2881 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2885 if (rs
!= 0 && rt
!= 0) {
2886 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2887 } else if (rs
== 0 && rt
!= 0) {
2888 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2889 } else if (rs
!= 0 && rt
== 0) {
2890 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2892 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2896 if (likely(rs
!= 0 && rt
!= 0)) {
2897 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2898 } else if (rs
== 0 && rt
!= 0) {
2899 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2900 } else if (rs
!= 0 && rt
== 0) {
2901 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2903 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2907 if (likely(rs
!= 0 && rt
!= 0)) {
2908 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2909 } else if (rs
== 0 && rt
!= 0) {
2910 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2911 } else if (rs
!= 0 && rt
== 0) {
2912 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2914 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2920 /* Set on lower than */
2921 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2922 int rd
, int rs
, int rt
)
2927 /* If no destination, treat it as a NOP. */
2931 t0
= tcg_temp_new();
2932 t1
= tcg_temp_new();
2933 gen_load_gpr(t0
, rs
);
2934 gen_load_gpr(t1
, rt
);
2937 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2940 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2948 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2949 int rd
, int rs
, int rt
)
        /* If no destination, treat it as a NOP. */
2959 t0
= tcg_temp_new();
2960 t1
= tcg_temp_new();
2961 gen_load_gpr(t0
, rs
);
2962 gen_load_gpr(t1
, rt
);
2965 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2966 tcg_gen_shl_tl(t0
, t1
, t0
);
2967 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2970 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2971 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2974 tcg_gen_ext32u_tl(t1
, t1
);
2975 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2976 tcg_gen_shr_tl(t0
, t1
, t0
);
2977 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2981 TCGv_i32 t2
= tcg_temp_new_i32();
2982 TCGv_i32 t3
= tcg_temp_new_i32();
2984 tcg_gen_trunc_tl_i32(t2
, t0
);
2985 tcg_gen_trunc_tl_i32(t3
, t1
);
2986 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2987 tcg_gen_rotr_i32(t2
, t3
, t2
);
2988 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2989 tcg_temp_free_i32(t2
);
2990 tcg_temp_free_i32(t3
);
2993 #if defined(TARGET_MIPS64)
2995 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2996 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2999 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3000 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3003 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3004 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3007 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3008 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3016 /* Arithmetic on HI/LO registers */
3017 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3019 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3030 #if defined(TARGET_MIPS64)
3032 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3036 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3040 #if defined(TARGET_MIPS64)
3042 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3046 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3051 #if defined(TARGET_MIPS64)
3053 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3057 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3060 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3065 #if defined(TARGET_MIPS64)
3067 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3071 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3074 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3080 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3083 TCGv t0
= tcg_const_tl(addr
);
3084 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3085 gen_store_gpr(t0
, reg
);
3089 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3095 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3098 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3099 addr
= addr_add(ctx
, pc
, offset
);
3100 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3104 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3105 addr
= addr_add(ctx
, pc
, offset
);
3106 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3108 #if defined(TARGET_MIPS64)
3111 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3112 addr
= addr_add(ctx
, pc
, offset
);
3113 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3117 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3120 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3121 addr
= addr_add(ctx
, pc
, offset
);
3122 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3127 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3128 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3129 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3132 #if defined(TARGET_MIPS64)
3133 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3134 case R6_OPC_LDPC
+ (1 << 16):
3135 case R6_OPC_LDPC
+ (2 << 16):
3136 case R6_OPC_LDPC
+ (3 << 16):
3138 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3139 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3140 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3144 MIPS_INVAL("OPC_PCREL");
3145 generate_exception_end(ctx
, EXCP_RI
);
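/*
 * Worked example (illustrative, not from the original source): for ALUIPC
 * with pc = 0x00401234 and a 16-bit immediate of 0x0001, the code above
 * computes addr_add(ctx, pc, 0x0001 << 16) = 0x00411234 and clears the low
 * 16 bits, so rs receives 0x00410000.  ADDIUPC and LWPC instead use a 19-bit
 * immediate shifted left by 2 and sign-extended to 21 bits.
 */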
3152 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3161 t0
= tcg_temp_new();
3162 t1
= tcg_temp_new();
3164 gen_load_gpr(t0
, rs
);
3165 gen_load_gpr(t1
, rt
);
3170 TCGv t2
= tcg_temp_new();
3171 TCGv t3
= tcg_temp_new();
3172 tcg_gen_ext32s_tl(t0
, t0
);
3173 tcg_gen_ext32s_tl(t1
, t1
);
3174 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3175 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3176 tcg_gen_and_tl(t2
, t2
, t3
);
3177 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3178 tcg_gen_or_tl(t2
, t2
, t3
);
3179 tcg_gen_movi_tl(t3
, 0);
3180 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3181 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3182 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3189 TCGv t2
= tcg_temp_new();
3190 TCGv t3
= tcg_temp_new();
3191 tcg_gen_ext32s_tl(t0
, t0
);
3192 tcg_gen_ext32s_tl(t1
, t1
);
3193 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3194 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3195 tcg_gen_and_tl(t2
, t2
, t3
);
3196 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3197 tcg_gen_or_tl(t2
, t2
, t3
);
3198 tcg_gen_movi_tl(t3
, 0);
3199 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3200 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3201 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3208 TCGv t2
= tcg_const_tl(0);
3209 TCGv t3
= tcg_const_tl(1);
3210 tcg_gen_ext32u_tl(t0
, t0
);
3211 tcg_gen_ext32u_tl(t1
, t1
);
3212 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3213 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3214 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3221 TCGv t2
= tcg_const_tl(0);
3222 TCGv t3
= tcg_const_tl(1);
3223 tcg_gen_ext32u_tl(t0
, t0
);
3224 tcg_gen_ext32u_tl(t1
, t1
);
3225 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3226 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3227 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3234 TCGv_i32 t2
= tcg_temp_new_i32();
3235 TCGv_i32 t3
= tcg_temp_new_i32();
3236 tcg_gen_trunc_tl_i32(t2
, t0
);
3237 tcg_gen_trunc_tl_i32(t3
, t1
);
3238 tcg_gen_mul_i32(t2
, t2
, t3
);
3239 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3240 tcg_temp_free_i32(t2
);
3241 tcg_temp_free_i32(t3
);
3246 TCGv_i32 t2
= tcg_temp_new_i32();
3247 TCGv_i32 t3
= tcg_temp_new_i32();
3248 tcg_gen_trunc_tl_i32(t2
, t0
);
3249 tcg_gen_trunc_tl_i32(t3
, t1
);
3250 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3251 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3252 tcg_temp_free_i32(t2
);
3253 tcg_temp_free_i32(t3
);
3258 TCGv_i32 t2
= tcg_temp_new_i32();
3259 TCGv_i32 t3
= tcg_temp_new_i32();
3260 tcg_gen_trunc_tl_i32(t2
, t0
);
3261 tcg_gen_trunc_tl_i32(t3
, t1
);
3262 tcg_gen_mul_i32(t2
, t2
, t3
);
3263 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3264 tcg_temp_free_i32(t2
);
3265 tcg_temp_free_i32(t3
);
3270 TCGv_i32 t2
= tcg_temp_new_i32();
3271 TCGv_i32 t3
= tcg_temp_new_i32();
3272 tcg_gen_trunc_tl_i32(t2
, t0
);
3273 tcg_gen_trunc_tl_i32(t3
, t1
);
3274 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3275 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3276 tcg_temp_free_i32(t2
);
3277 tcg_temp_free_i32(t3
);
3280 #if defined(TARGET_MIPS64)
3283 TCGv t2
= tcg_temp_new();
3284 TCGv t3
= tcg_temp_new();
3285 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3286 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3287 tcg_gen_and_tl(t2
, t2
, t3
);
3288 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3289 tcg_gen_or_tl(t2
, t2
, t3
);
3290 tcg_gen_movi_tl(t3
, 0);
3291 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3292 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3299 TCGv t2
= tcg_temp_new();
3300 TCGv t3
= tcg_temp_new();
3301 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3302 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3303 tcg_gen_and_tl(t2
, t2
, t3
);
3304 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3305 tcg_gen_or_tl(t2
, t2
, t3
);
3306 tcg_gen_movi_tl(t3
, 0);
3307 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3308 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3315 TCGv t2
= tcg_const_tl(0);
3316 TCGv t3
= tcg_const_tl(1);
3317 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3318 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3325 TCGv t2
= tcg_const_tl(0);
3326 TCGv t3
= tcg_const_tl(1);
3327 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3328 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3334 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3338 TCGv t2
= tcg_temp_new();
3339 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3344 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3348 TCGv t2
= tcg_temp_new();
3349 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3355 MIPS_INVAL("r6 mul/div");
3356 generate_exception_end(ctx
, EXCP_RI
);
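/*
 * Illustrative note (not part of the original code): the DIV/MOD paths above
 * never let the host divide by zero or evaluate INT_MIN / -1.  They build a
 * predicate t2 = (divisor == 0) || (dividend == INT_MIN && divisor == -1)
 * and use tcg_gen_movcond_tl() to substitute that predicate's value (1) for
 * the divisor whenever it is true, so the host division is always defined;
 * the architecturally UNPREDICTABLE result left in rd is then harmless.
 */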
3364 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3365 int acc
, int rs
, int rt
)
3369 t0
= tcg_temp_new();
3370 t1
= tcg_temp_new();
3372 gen_load_gpr(t0
, rs
);
3373 gen_load_gpr(t1
, rt
);
3382 TCGv t2
= tcg_temp_new();
3383 TCGv t3
= tcg_temp_new();
3384 tcg_gen_ext32s_tl(t0
, t0
);
3385 tcg_gen_ext32s_tl(t1
, t1
);
3386 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3387 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3388 tcg_gen_and_tl(t2
, t2
, t3
);
3389 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3390 tcg_gen_or_tl(t2
, t2
, t3
);
3391 tcg_gen_movi_tl(t3
, 0);
3392 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3393 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3394 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3395 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3396 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3403 TCGv t2
= tcg_const_tl(0);
3404 TCGv t3
= tcg_const_tl(1);
3405 tcg_gen_ext32u_tl(t0
, t0
);
3406 tcg_gen_ext32u_tl(t1
, t1
);
3407 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3408 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3409 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3410 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3411 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3418 TCGv_i32 t2
= tcg_temp_new_i32();
3419 TCGv_i32 t3
= tcg_temp_new_i32();
3420 tcg_gen_trunc_tl_i32(t2
, t0
);
3421 tcg_gen_trunc_tl_i32(t3
, t1
);
3422 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3423 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3424 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3425 tcg_temp_free_i32(t2
);
3426 tcg_temp_free_i32(t3
);
3431 TCGv_i32 t2
= tcg_temp_new_i32();
3432 TCGv_i32 t3
= tcg_temp_new_i32();
3433 tcg_gen_trunc_tl_i32(t2
, t0
);
3434 tcg_gen_trunc_tl_i32(t3
, t1
);
3435 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3436 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3437 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3438 tcg_temp_free_i32(t2
);
3439 tcg_temp_free_i32(t3
);
3442 #if defined(TARGET_MIPS64)
3445 TCGv t2
= tcg_temp_new();
3446 TCGv t3
= tcg_temp_new();
3447 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3448 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3449 tcg_gen_and_tl(t2
, t2
, t3
);
3450 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3451 tcg_gen_or_tl(t2
, t2
, t3
);
3452 tcg_gen_movi_tl(t3
, 0);
3453 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3454 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3455 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3462 TCGv t2
= tcg_const_tl(0);
3463 TCGv t3
= tcg_const_tl(1);
3464 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3465 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3466 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3472 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3475 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3480 TCGv_i64 t2
= tcg_temp_new_i64();
3481 TCGv_i64 t3
= tcg_temp_new_i64();
3483 tcg_gen_ext_tl_i64(t2
, t0
);
3484 tcg_gen_ext_tl_i64(t3
, t1
);
3485 tcg_gen_mul_i64(t2
, t2
, t3
);
3486 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3487 tcg_gen_add_i64(t2
, t2
, t3
);
3488 tcg_temp_free_i64(t3
);
3489 gen_move_low32(cpu_LO
[acc
], t2
);
3490 gen_move_high32(cpu_HI
[acc
], t2
);
3491 tcg_temp_free_i64(t2
);
3496 TCGv_i64 t2
= tcg_temp_new_i64();
3497 TCGv_i64 t3
= tcg_temp_new_i64();
3499 tcg_gen_ext32u_tl(t0
, t0
);
3500 tcg_gen_ext32u_tl(t1
, t1
);
3501 tcg_gen_extu_tl_i64(t2
, t0
);
3502 tcg_gen_extu_tl_i64(t3
, t1
);
3503 tcg_gen_mul_i64(t2
, t2
, t3
);
3504 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3505 tcg_gen_add_i64(t2
, t2
, t3
);
3506 tcg_temp_free_i64(t3
);
3507 gen_move_low32(cpu_LO
[acc
], t2
);
3508 gen_move_high32(cpu_HI
[acc
], t2
);
3509 tcg_temp_free_i64(t2
);
3514 TCGv_i64 t2
= tcg_temp_new_i64();
3515 TCGv_i64 t3
= tcg_temp_new_i64();
3517 tcg_gen_ext_tl_i64(t2
, t0
);
3518 tcg_gen_ext_tl_i64(t3
, t1
);
3519 tcg_gen_mul_i64(t2
, t2
, t3
);
3520 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3521 tcg_gen_sub_i64(t2
, t3
, t2
);
3522 tcg_temp_free_i64(t3
);
3523 gen_move_low32(cpu_LO
[acc
], t2
);
3524 gen_move_high32(cpu_HI
[acc
], t2
);
3525 tcg_temp_free_i64(t2
);
3530 TCGv_i64 t2
= tcg_temp_new_i64();
3531 TCGv_i64 t3
= tcg_temp_new_i64();
3533 tcg_gen_ext32u_tl(t0
, t0
);
3534 tcg_gen_ext32u_tl(t1
, t1
);
3535 tcg_gen_extu_tl_i64(t2
, t0
);
3536 tcg_gen_extu_tl_i64(t3
, t1
);
3537 tcg_gen_mul_i64(t2
, t2
, t3
);
3538 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3539 tcg_gen_sub_i64(t2
, t3
, t2
);
3540 tcg_temp_free_i64(t3
);
3541 gen_move_low32(cpu_LO
[acc
], t2
);
3542 gen_move_high32(cpu_HI
[acc
], t2
);
3543 tcg_temp_free_i64(t2
);
3547 MIPS_INVAL("mul/div");
3548 generate_exception_end(ctx
, EXCP_RI
);
3556 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3557 int rd
, int rs
, int rt
)
3559 TCGv t0
= tcg_temp_new();
3560 TCGv t1
= tcg_temp_new();
3562 gen_load_gpr(t0
, rs
);
3563 gen_load_gpr(t1
, rt
);
3566 case OPC_VR54XX_MULS
:
3567 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3569 case OPC_VR54XX_MULSU
:
3570 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3572 case OPC_VR54XX_MACC
:
3573 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3575 case OPC_VR54XX_MACCU
:
3576 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3578 case OPC_VR54XX_MSAC
:
3579 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3581 case OPC_VR54XX_MSACU
:
3582 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3584 case OPC_VR54XX_MULHI
:
3585 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3587 case OPC_VR54XX_MULHIU
:
3588 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3590 case OPC_VR54XX_MULSHI
:
3591 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3593 case OPC_VR54XX_MULSHIU
:
3594 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3596 case OPC_VR54XX_MACCHI
:
3597 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3599 case OPC_VR54XX_MACCHIU
:
3600 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3602 case OPC_VR54XX_MSACHI
:
3603 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3605 case OPC_VR54XX_MSACHIU
:
3606 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3609 MIPS_INVAL("mul vr54xx");
3610 generate_exception_end(ctx
, EXCP_RI
);
3613 gen_store_gpr(t0
, rd
);
3620 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3630 gen_load_gpr(t0
, rs
);
3635 #if defined(TARGET_MIPS64)
3639 tcg_gen_not_tl(t0
, t0
);
3648 tcg_gen_ext32u_tl(t0
, t0
);
3649 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3650 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3652 #if defined(TARGET_MIPS64)
3657 tcg_gen_clzi_i64(t0
, t0
, 64);
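/*
 * Worked example (illustrative): for CLZ of the 32-bit value 0x0000f000 on a
 * 64-bit build, the code above zero-extends the operand, takes a 64-bit
 * count-leading-zeros (48) and subtracts TARGET_LONG_BITS - 32 = 32, giving
 * the architectural result 16.  CLO/DCLO simply invert the operand first.
 */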
3663 /* Godson integer instructions */
3664 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3665 int rd
, int rs
, int rt
)
3677 case OPC_MULTU_G_2E
:
3678 case OPC_MULTU_G_2F
:
3679 #if defined(TARGET_MIPS64)
3680 case OPC_DMULT_G_2E
:
3681 case OPC_DMULT_G_2F
:
3682 case OPC_DMULTU_G_2E
:
3683 case OPC_DMULTU_G_2F
:
3685 t0
= tcg_temp_new();
3686 t1
= tcg_temp_new();
3689 t0
= tcg_temp_local_new();
3690 t1
= tcg_temp_local_new();
3694 gen_load_gpr(t0
, rs
);
3695 gen_load_gpr(t1
, rt
);
3700 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3701 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3703 case OPC_MULTU_G_2E
:
3704 case OPC_MULTU_G_2F
:
3705 tcg_gen_ext32u_tl(t0
, t0
);
3706 tcg_gen_ext32u_tl(t1
, t1
);
3707 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3708 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3713 TCGLabel
*l1
= gen_new_label();
3714 TCGLabel
*l2
= gen_new_label();
3715 TCGLabel
*l3
= gen_new_label();
3716 tcg_gen_ext32s_tl(t0
, t0
);
3717 tcg_gen_ext32s_tl(t1
, t1
);
3718 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3719 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3722 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3723 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3724 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3727 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3728 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3735 TCGLabel
*l1
= gen_new_label();
3736 TCGLabel
*l2
= gen_new_label();
3737 tcg_gen_ext32u_tl(t0
, t0
);
3738 tcg_gen_ext32u_tl(t1
, t1
);
3739 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3740 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3743 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3744 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3751 TCGLabel
*l1
= gen_new_label();
3752 TCGLabel
*l2
= gen_new_label();
3753 TCGLabel
*l3
= gen_new_label();
3754 tcg_gen_ext32u_tl(t0
, t0
);
3755 tcg_gen_ext32u_tl(t1
, t1
);
3756 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3757 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3758 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3760 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3763 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3764 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3771 TCGLabel
*l1
= gen_new_label();
3772 TCGLabel
*l2
= gen_new_label();
3773 tcg_gen_ext32u_tl(t0
, t0
);
3774 tcg_gen_ext32u_tl(t1
, t1
);
3775 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3776 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3779 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3780 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3784 #if defined(TARGET_MIPS64)
3785 case OPC_DMULT_G_2E
:
3786 case OPC_DMULT_G_2F
:
3787 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3789 case OPC_DMULTU_G_2E
:
3790 case OPC_DMULTU_G_2F
:
3791 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3796 TCGLabel
*l1
= gen_new_label();
3797 TCGLabel
*l2
= gen_new_label();
3798 TCGLabel
*l3
= gen_new_label();
3799 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3800 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3803 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3804 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3805 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3808 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3812 case OPC_DDIVU_G_2E
:
3813 case OPC_DDIVU_G_2F
:
3815 TCGLabel
*l1
= gen_new_label();
3816 TCGLabel
*l2
= gen_new_label();
3817 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3818 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3821 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3828 TCGLabel
*l1
= gen_new_label();
3829 TCGLabel
*l2
= gen_new_label();
3830 TCGLabel
*l3
= gen_new_label();
3831 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3832 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3833 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3835 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3838 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3842 case OPC_DMODU_G_2E
:
3843 case OPC_DMODU_G_2F
:
3845 TCGLabel
*l1
= gen_new_label();
3846 TCGLabel
*l2
= gen_new_label();
3847 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3848 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3851 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
/* Loongson multimedia instructions */
static void gen_loongson_multimedia(DisasContext *ctx, int rd, int rs, int rt)
    uint32_t opc, shift_max;

    opc = MASK_LMI(ctx->opcode);
        t0 = tcg_temp_local_new_i64();
        t1 = tcg_temp_local_new_i64();
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

    check_cp1_enabled(ctx);
    gen_load_fpr64(ctx, t0, rs);
    gen_load_fpr64(ctx, t1, rt);

#define LMI_HELPER(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
#define LMI_HELPER_1(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0); break
#define LMI_DIRECT(UP, LO, OP) \
    case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break

    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);

    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);

    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);

    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);

    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);

    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);

    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);

    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);

    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);

    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(OR_CP2, or, or);
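    /* Each LMI_* line above expands to a full switch case: LMI_HELPER
       and LMI_HELPER_1 call an out-of-line helper, while LMI_DIRECT
       emits the equivalent TCG op inline for operations that map 1:1
       onto 64-bit host arithmetic. */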
        tcg_gen_andc_i64(t0, t1, t0);

        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);

        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);

        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);

        /* Make sure shift count isn't TCG undefined behaviour.  */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);

        tcg_gen_shl_i64(t0, t0, t1);

        /* Since SRA is UndefinedResult without sign-extended inputs,
           we can treat SRA and DSRA the same.  */
        tcg_gen_sar_i64(t0, t0, t1);

        /* We want to shift in zeros for SRL; zero-extend first.  */
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_shr_i64(t0, t0, t1);

        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);
        }

        /* Shifts larger than MAX produce zero.  */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
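        /* The setcond/neg/and sequence above clamps over-long shift
           counts without a branch: setcond leaves 1 in t1 while the
           count is below shift_max, negating that gives an all-ones or
           all-zero mask, and the final AND forces the result to zero
           for counts >= shift_max. */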
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_add_i64(t0, t1, t2);
            if (opc == OPC_ADD_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_andc_i64(t1, t2, t1);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
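            /* Signed-overflow check for (D)ADD_CP2: with the operands
               in t1/t2 and their sum in t0, the xor/andc sequence sets
               the sign bit of t1 exactly when both operands share a
               sign and the result does not, so the branch above skips
               EXCP_OVERFLOW in every non-overflowing case. */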
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_sub_i64(t0, t1, t2);
            if (opc == OPC_SUB_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            }
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_and_i64(t1, t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);

        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);

        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field?  */

        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr64(ctx, t0, rd);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
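    /* The Loongson multimedia ops work on the FPU register file: the
       operands were fetched with gen_load_fpr64() and the 64-bit result
       is written back with gen_store_fpr64(), which is why the whole
       routine is gated on check_cp1_enabled(). */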
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */
            generate_exception_end(ctx, EXCP_TRAP);
        case OPC_TLT:   /* rs < rs           */
        case OPC_TLTI:  /* r0 < 0            */
        case OPC_TLTU:  /* rs < rs unsigned  */
        case OPC_TLTIU: /* r0 < 0  unsigned  */
        case OPC_TNE:   /* rs != rs          */
        case OPC_TNEI:  /* r0 != 0           */
            /* Never trap: treat as NOP. */
        TCGLabel *l1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
        generate_exception(ctx, EXCP_TRAP);
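    /* For the conditional trap forms the generated code tests the
       inverse condition and branches around the trap, e.g. TEQ only
       falls through to generate_exception(EXCP_TRAP) when the NE branch
       above is not taken, i.e. when the operands are equal. */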
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
    if (unlikely(ctx->singlestep_enabled)) {
        return false;
    }
#ifndef CONFIG_USER_ONLY
    return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);

static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
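/* Direct block chaining with tcg_gen_exit_tb((uintptr_t)tb + n) is only
   used when use_goto_tb() allows it: the destination must lie on the
   same guest page as the current TB so that page-level invalidation
   still catches it, and single-stepping must be off; under single-step
   a debug exception is raised instead of chaining. */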
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int insn_bytes,
                                int rs, int rt, int32_t offset,
                                int delayslot_size)
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->pc);
        generate_exception_end(ctx, EXCP_RI);

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        btgt = ctx->pc + insn_bytes + offset;
        /* Compare to zero */
            gen_load_gpr(t0, rs);
        btgt = ctx->pc + insn_bytes + offset;
#if defined(TARGET_MIPS64)
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->pc + insn_bytes + offset;
        /* Jump to immediate */
        btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            /* Always take and link */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:     /* rx != rx        */
        case OPC_BGTZ:    /* 0 > 0           */
        case OPC_BLTZ:    /* 0 < 0           */
        case OPC_BLTZAL:  /* 0 < 0           */
            /* Handle as an unconditional branch to get correct delay
               slot checks.  */
            btgt = ctx->pc + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely    */
        case OPC_BLTZL:   /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BX;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);

            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
#if defined(TARGET_MIPS64)
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BC;
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BL;
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);

    ctx->btarget = btgt;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;

        int post_delay = insn_bytes + delayslot_size;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);

        tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);

    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
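/* The hflags set above drive the delay-slot machinery: MIPS_HFLAG_B
   marks an unconditional branch, MIPS_HFLAG_BC/MIPS_HFLAG_BL a
   conditional or branch-likely one (likely variants skip the delay slot
   when not taken), MIPS_HFLAG_BR a jump through a register, and
   MIPS_HFLAG_BDS16/BDS32 record the delay-slot instruction size. */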
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
        if (lsb + msb > 31) {
        tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
            /* The two checks together imply that lsb == 0,
               so this is a simple sign-extension.  */
            tcg_gen_ext32s_tl(t0, t1);
#if defined(TARGET_MIPS64)
        if (lsb + msb > 63) {
        tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_gpr(t0, rt);
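/* For EXT/DEXT the msb field of the instruction encodes the field width
   minus one, hence the extract width of msb + 1; for INS/DINS it
   encodes the position of the field's top bit, hence the deposit width
   of msb - lsb + 1. */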
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
        /* If no destination, treat it as a NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);

        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 16);
        tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
        tcg_gen_shli_tl(t0, t0, 16);
        tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_shri_tl(t1, t0, 32);
        tcg_gen_shli_tl(t0, t0, 32);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
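        /* WSBH/DSBH swap the bytes within each halfword by shifting the
           odd and even byte lanes towards each other under
           0x00FF00FF-style masks; DSHD then swaps the halfwords and the
           two words, so DSBH followed by DSHD reverses all eight bytes
           of a doubleword. */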
4620 MIPS_INVAL("bsfhl");
4621 generate_exception_end(ctx
, EXCP_RI
);
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    }
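/* LSA/DLSA compute rd = (rs << (sa + 1)) + rt; the two-bit sa field
   encodes the shift amount minus one, which is why imm2 + 1 is used
   above.  LSA additionally sign-extends the 32-bit result. */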
static void gen_align(DisasContext *ctx, int opc, int rd, int rs, int rt,
                      int bp)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        tcg_gen_mov_tl(cpu_gpr[rd], t0);
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t1, rs);
            TCGv_i64 t2 = tcg_temp_new_i64();
            tcg_gen_concat_tl_i64(t2, t1, t0);
            tcg_gen_shri_i64(t2, t2, 8 * (4 - bp));
            gen_move_low32(cpu_gpr[rd], t2);
            tcg_temp_free_i64(t2);
#if defined(TARGET_MIPS64)
            tcg_gen_shli_tl(t0, t0, 8 * bp);
            tcg_gen_shri_tl(t1, t1, 8 * (8 - bp));
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
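/* ALIGN builds rd = (rt << 8*bp) | (rs >> 8*(4-bp)) by concatenating
   rt (upper) and rs (lower) into a 64-bit temporary and right-shifting
   it by 8 * (4 - bp) bits; the DALIGN path does the same directly on
   64-bit registers with 8-byte positions. */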
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        gen_helper_bitswap(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dbitswap(cpu_gpr[rd], t0);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
#else
    tcg_gen_concat32_i64(t1, t1, t0);
#endif
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
#else
    tcg_gen_shri_i64(t0, t0, 32);
#endif
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}

static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}
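/* The helpers above factor out the common CP0 access patterns: the
   mfhc0/mthc0 variants move the upper 32 bits of a 64-bit CP0 register
   (the EntryLo variant splices at bit 30 rather than 32 on MIPS64),
   while gen_mfc0_load32/gen_mtc0_store32 handle 32-bit registers with
   the usual sign-extended view in the target register. */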
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)

static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                             ctx->CP0_LLAddr_shift);
            CP0_CHECK(ctx->mrp);
            gen_helper_mfhc0_maar(arg, cpu_env);
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
            goto cp0_unimplemented;
        goto cp0_unimplemented;

    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);

cp0_unimplemented:
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    tcg_gen_movi_tl(arg, 0);
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            /* LLAddr is read-only (the only exception is bit 0 if LLB is
               supported); the CP0_LLAddr_rw_bitmask does not seem to be
               relevant for modern MIPS cores supporting MTHC0, therefore
               treating MTHC0 to LLAddr as NOP. */
            CP0_CHECK(ctx->mrp);
            gen_helper_mthc0_maar(cpu_env, arg);
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
            goto cp0_unimplemented;
        goto cp0_unimplemented;

    (void)rn; /* avoid a compiler warning */

cp0_unimplemented:
    LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn, reg, sel);
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
{
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
    } else {
        tcg_gen_movi_tl(arg, ~0);
    }
}
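/* Reads of unimplemented CP0 registers therefore return 0 when the core
   implements Release 6 (which mandates the zero value) and all ones
   otherwise, where the architecture leaves the result unpredictable. */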
4959 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4961 const char *rn
= "invalid";
4964 check_insn(ctx
, ISA_MIPS32
);
4970 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4974 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4975 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4979 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4980 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4984 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4985 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4990 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
4994 goto cp0_unimplemented
;
5000 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5001 gen_helper_mfc0_random(arg
, cpu_env
);
5005 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5006 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5010 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5011 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5015 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5016 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5020 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5021 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5025 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5026 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5030 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5031 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5032 rn
= "VPEScheFBack";
5035 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5036 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5040 goto cp0_unimplemented
;
5047 TCGv_i64 tmp
= tcg_temp_new_i64();
5048 tcg_gen_ld_i64(tmp
, cpu_env
,
5049 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5050 #if defined(TARGET_MIPS64)
5052 /* Move RI/XI fields to bits 31:30 */
5053 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5054 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5057 gen_move_low32(arg
, tmp
);
5058 tcg_temp_free_i64(tmp
);
5063 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5064 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5068 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5069 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5073 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5074 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5078 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5079 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5083 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5084 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5088 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5089 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5093 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5094 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5098 goto cp0_unimplemented
;
5105 TCGv_i64 tmp
= tcg_temp_new_i64();
5106 tcg_gen_ld_i64(tmp
, cpu_env
,
5107 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5108 #if defined(TARGET_MIPS64)
5110 /* Move RI/XI fields to bits 31:30 */
5111 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5112 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5115 gen_move_low32(arg
, tmp
);
5116 tcg_temp_free_i64(tmp
);
5122 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5123 rn
= "GlobalNumber";
5126 goto cp0_unimplemented
;
5132 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5133 tcg_gen_ext32s_tl(arg
, arg
);
5137 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5138 rn
= "ContextConfig";
5139 goto cp0_unimplemented
;
5142 CP0_CHECK(ctx
->ulri
);
5143 tcg_gen_ld32s_tl(arg
, cpu_env
,
5144 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5148 goto cp0_unimplemented
;
5154 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5158 check_insn(ctx
, ISA_MIPS32R2
);
5159 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5163 goto cp0_unimplemented
;
5169 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5173 check_insn(ctx
, ISA_MIPS32R2
);
5174 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5178 check_insn(ctx
, ISA_MIPS32R2
);
5179 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5183 check_insn(ctx
, ISA_MIPS32R2
);
5184 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5188 check_insn(ctx
, ISA_MIPS32R2
);
5189 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5193 check_insn(ctx
, ISA_MIPS32R2
);
5194 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5198 goto cp0_unimplemented
;
5204 check_insn(ctx
, ISA_MIPS32R2
);
5205 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5209 goto cp0_unimplemented
;
5215 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5216 tcg_gen_ext32s_tl(arg
, arg
);
5221 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5226 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5230 goto cp0_unimplemented
;
5236 /* Mark as an IO operation because we read the time. */
5237 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5240 gen_helper_mfc0_count(arg
, cpu_env
);
5241 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5244 /* Break the TB to be able to take timer interrupts immediately
5245 after reading count. */
5246 ctx
->bstate
= BS_STOP
;
5249 /* 6,7 are implementation dependent */
5251 goto cp0_unimplemented
;
5257 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5258 tcg_gen_ext32s_tl(arg
, arg
);
5262 goto cp0_unimplemented
;
5268 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5271 /* 6,7 are implementation dependent */
5273 goto cp0_unimplemented
;
5279 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5283 check_insn(ctx
, ISA_MIPS32R2
);
5284 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5288 check_insn(ctx
, ISA_MIPS32R2
);
5289 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5293 check_insn(ctx
, ISA_MIPS32R2
);
5294 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5298 goto cp0_unimplemented
;
5304 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5308 goto cp0_unimplemented
;
5314 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5315 tcg_gen_ext32s_tl(arg
, arg
);
5319 goto cp0_unimplemented
;
5325 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5329 check_insn(ctx
, ISA_MIPS32R2
);
5330 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5334 check_insn(ctx
, ISA_MIPS32R2
);
5335 CP0_CHECK(ctx
->cmgcr
);
5336 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5337 tcg_gen_ext32s_tl(arg
, arg
);
5341 goto cp0_unimplemented
;
5347 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5351 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5355 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5359 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5363 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5367 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5370 /* 6,7 are implementation dependent */
5372 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5376 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5380 goto cp0_unimplemented
;
5386 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5390 CP0_CHECK(ctx
->mrp
);
5391 gen_helper_mfc0_maar(arg
, cpu_env
);
5395 CP0_CHECK(ctx
->mrp
);
5396 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5400 goto cp0_unimplemented
;
5406 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5410 goto cp0_unimplemented
;
5416 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5420 goto cp0_unimplemented
;
5426 #if defined(TARGET_MIPS64)
5427 check_insn(ctx
, ISA_MIPS3
);
5428 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5429 tcg_gen_ext32s_tl(arg
, arg
);
5434 goto cp0_unimplemented
;
5438 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5439 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5442 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5446 goto cp0_unimplemented
;
5450 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5451 rn
= "'Diagnostic"; /* implementation dependent */
5456 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5460 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5461 rn
= "TraceControl";
5464 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5465 rn
= "TraceControl2";
5468 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5469 rn
= "UserTraceData";
5472 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5476 goto cp0_unimplemented
;
5483 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5484 tcg_gen_ext32s_tl(arg
, arg
);
5488 goto cp0_unimplemented
;
5494 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5495 rn
= "Performance0";
5498 // gen_helper_mfc0_performance1(arg);
5499 rn
= "Performance1";
5502 // gen_helper_mfc0_performance2(arg);
5503 rn
= "Performance2";
5506 // gen_helper_mfc0_performance3(arg);
5507 rn
= "Performance3";
5510 // gen_helper_mfc0_performance4(arg);
5511 rn
= "Performance4";
5514 // gen_helper_mfc0_performance5(arg);
5515 rn
= "Performance5";
5518 // gen_helper_mfc0_performance6(arg);
5519 rn
= "Performance6";
5522 // gen_helper_mfc0_performance7(arg);
5523 rn
= "Performance7";
5526 goto cp0_unimplemented
;
5532 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5536 goto cp0_unimplemented
;
5542 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5546 goto cp0_unimplemented
;
5556 TCGv_i64 tmp
= tcg_temp_new_i64();
5557 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5558 gen_move_low32(arg
, tmp
);
5559 tcg_temp_free_i64(tmp
);
5567 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5571 goto cp0_unimplemented
;
5580 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5587 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5591 goto cp0_unimplemented
;
5597 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5598 tcg_gen_ext32s_tl(arg
, arg
);
5602 goto cp0_unimplemented
;
5609 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5613 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5614 tcg_gen_ld_tl(arg
, cpu_env
,
5615 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5616 tcg_gen_ext32s_tl(arg
, arg
);
5620 goto cp0_unimplemented
;
5624 goto cp0_unimplemented
;
5626 (void)rn
; /* avoid a compiler warning */
5627 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5631 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5632 gen_mfc0_unimplemented(ctx
, arg
);
5635 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5637 const char *rn
= "invalid";
5640 check_insn(ctx
, ISA_MIPS32
);
5642 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5650 gen_helper_mtc0_index(cpu_env
, arg
);
5654 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5655 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5659 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5664 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5674 goto cp0_unimplemented
;
5684 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5685 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5689 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5690 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5694 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5695 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5699 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5700 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5704 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5705 tcg_gen_st_tl(arg
, cpu_env
,
5706 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5710 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5711 tcg_gen_st_tl(arg
, cpu_env
,
5712 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5713 rn
= "VPEScheFBack";
5716 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5717 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5721 goto cp0_unimplemented
;
5727 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5731 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5732 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5736 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5737 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5741 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5742 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5746 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5747 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5751 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5752 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5756 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5757 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5761 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5762 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5766 goto cp0_unimplemented
;
5772 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5778 rn
= "GlobalNumber";
5781 goto cp0_unimplemented
;
5787 gen_helper_mtc0_context(cpu_env
, arg
);
5791 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5792 rn
= "ContextConfig";
5793 goto cp0_unimplemented
;
5796 CP0_CHECK(ctx
->ulri
);
5797 tcg_gen_st_tl(arg
, cpu_env
,
5798 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5802 goto cp0_unimplemented
;
5808 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5812 check_insn(ctx
, ISA_MIPS32R2
);
5813 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5815 ctx
->bstate
= BS_STOP
;
5818 goto cp0_unimplemented
;
5824 gen_helper_mtc0_wired(cpu_env
, arg
);
5828 check_insn(ctx
, ISA_MIPS32R2
);
5829 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5833 check_insn(ctx
, ISA_MIPS32R2
);
5834 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5838 check_insn(ctx
, ISA_MIPS32R2
);
5839 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5843 check_insn(ctx
, ISA_MIPS32R2
);
5844 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5848 check_insn(ctx
, ISA_MIPS32R2
);
5849 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5853 goto cp0_unimplemented
;
5859 check_insn(ctx
, ISA_MIPS32R2
);
5860 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5861 ctx
->bstate
= BS_STOP
;
5865 goto cp0_unimplemented
;
5883 goto cp0_unimplemented
;
5889 gen_helper_mtc0_count(cpu_env
, arg
);
5892 /* 6,7 are implementation dependent */
5894 goto cp0_unimplemented
;
5900 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5904 goto cp0_unimplemented
;
5910 gen_helper_mtc0_compare(cpu_env
, arg
);
5913 /* 6,7 are implementation dependent */
5915 goto cp0_unimplemented
;
5921 save_cpu_state(ctx
, 1);
5922 gen_helper_mtc0_status(cpu_env
, arg
);
5923 /* BS_STOP isn't good enough here, hflags may have changed. */
5924 gen_save_pc(ctx
->pc
+ 4);
5925 ctx
->bstate
= BS_EXCP
;
5929 check_insn(ctx
, ISA_MIPS32R2
);
5930 gen_helper_mtc0_intctl(cpu_env
, arg
);
5931 /* Stop translation as we may have switched the execution mode */
5932 ctx
->bstate
= BS_STOP
;
5936 check_insn(ctx
, ISA_MIPS32R2
);
5937 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5938 /* Stop translation as we may have switched the execution mode */
5939 ctx
->bstate
= BS_STOP
;
5943 check_insn(ctx
, ISA_MIPS32R2
);
5944 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5945 /* Stop translation as we may have switched the execution mode */
5946 ctx
->bstate
= BS_STOP
;
5950 goto cp0_unimplemented
;
5956 save_cpu_state(ctx
, 1);
5957 gen_helper_mtc0_cause(cpu_env
, arg
);
5961 goto cp0_unimplemented
;
5967 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5971 goto cp0_unimplemented
;
5981 check_insn(ctx
, ISA_MIPS32R2
);
5982 gen_helper_mtc0_ebase(cpu_env
, arg
);
5986 goto cp0_unimplemented
;
5992 gen_helper_mtc0_config0(cpu_env
, arg
);
5994 /* Stop translation as we may have switched the execution mode */
5995 ctx
->bstate
= BS_STOP
;
5998 /* ignored, read only */
6002 gen_helper_mtc0_config2(cpu_env
, arg
);
6004 /* Stop translation as we may have switched the execution mode */
6005 ctx
->bstate
= BS_STOP
;
6008 gen_helper_mtc0_config3(cpu_env
, arg
);
6010 /* Stop translation as we may have switched the execution mode */
6011 ctx
->bstate
= BS_STOP
;
6014 gen_helper_mtc0_config4(cpu_env
, arg
);
6016 ctx
->bstate
= BS_STOP
;
6019 gen_helper_mtc0_config5(cpu_env
, arg
);
6021 /* Stop translation as we may have switched the execution mode */
6022 ctx
->bstate
= BS_STOP
;
6024 /* 6,7 are implementation dependent */
6034 rn
= "Invalid config selector";
6035 goto cp0_unimplemented
;
6041 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6045 CP0_CHECK(ctx
->mrp
);
6046 gen_helper_mtc0_maar(cpu_env
, arg
);
6050 CP0_CHECK(ctx
->mrp
);
6051 gen_helper_mtc0_maari(cpu_env
, arg
);
6055 goto cp0_unimplemented
;
6061 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6065 goto cp0_unimplemented
;
6071 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6075 goto cp0_unimplemented
;
6081 #if defined(TARGET_MIPS64)
6082 check_insn(ctx
, ISA_MIPS3
);
6083 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6088 goto cp0_unimplemented
;
6092 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6093 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6096 gen_helper_mtc0_framemask(cpu_env
, arg
);
6100 goto cp0_unimplemented
;
6105 rn
= "Diagnostic"; /* implementation dependent */
6110 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6111 /* BS_STOP isn't good enough here, hflags may have changed. */
6112 gen_save_pc(ctx
->pc
+ 4);
6113 ctx
->bstate
= BS_EXCP
;
6117 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6118 rn
= "TraceControl";
6119 /* Stop translation as we may have switched the execution mode */
6120 ctx
->bstate
= BS_STOP
;
6123 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6124 rn
= "TraceControl2";
6125 /* Stop translation as we may have switched the execution mode */
6126 ctx
->bstate
= BS_STOP
;
6129 /* Stop translation as we may have switched the execution mode */
6130 ctx
->bstate
= BS_STOP
;
6131 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6132 rn
= "UserTraceData";
6133 /* Stop translation as we may have switched the execution mode */
6134 ctx
->bstate
= BS_STOP
;
6137 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6138 /* Stop translation as we may have switched the execution mode */
6139 ctx
->bstate
= BS_STOP
;
6143 goto cp0_unimplemented
;
6150 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6154 goto cp0_unimplemented
;
6160 gen_helper_mtc0_performance0(cpu_env
, arg
);
6161 rn
= "Performance0";
6164 // gen_helper_mtc0_performance1(arg);
6165 rn
= "Performance1";
6168 // gen_helper_mtc0_performance2(arg);
6169 rn
= "Performance2";
6172 // gen_helper_mtc0_performance3(arg);
6173 rn
= "Performance3";
6176 // gen_helper_mtc0_performance4(arg);
6177 rn
= "Performance4";
6180 // gen_helper_mtc0_performance5(arg);
6181 rn
= "Performance5";
6184 // gen_helper_mtc0_performance6(arg);
6185 rn
= "Performance6";
6188 // gen_helper_mtc0_performance7(arg);
6189 rn
= "Performance7";
6192 goto cp0_unimplemented
;
6198 gen_helper_mtc0_errctl(cpu_env
, arg
);
6199 ctx
->bstate
= BS_STOP
;
6203 goto cp0_unimplemented
;
6213 goto cp0_unimplemented
;
6222 gen_helper_mtc0_taglo(cpu_env
, arg
);
6229 gen_helper_mtc0_datalo(cpu_env
, arg
);
6233 goto cp0_unimplemented
;
6242 gen_helper_mtc0_taghi(cpu_env
, arg
);
6249 gen_helper_mtc0_datahi(cpu_env
, arg
);
6254 goto cp0_unimplemented
;
6260 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6264 goto cp0_unimplemented
;
6271 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6275 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6276 tcg_gen_st_tl(arg
, cpu_env
,
6277 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6281 goto cp0_unimplemented
;
6283 /* Stop translation as we may have switched the execution mode */
6284 ctx
->bstate
= BS_STOP
;
6287 goto cp0_unimplemented
;
6289 (void)rn
; /* avoid a compiler warning */
6290 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6291 /* For simplicity assume that all writes can cause interrupts. */
6292 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6294 ctx
->bstate
= BS_STOP
;
6299 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6302 #if defined(TARGET_MIPS64)
6303 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6305 const char *rn
= "invalid";
6308 check_insn(ctx
, ISA_MIPS64
);
6314 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6318 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6319 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6323 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6324 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6328 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6329 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6334 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6338 goto cp0_unimplemented
;
6344 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6345 gen_helper_mfc0_random(arg
, cpu_env
);
6349 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6350 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6354 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6355 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6359 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6360 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6364 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6365 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6369 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6370 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6374 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6375 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6376 rn
= "VPEScheFBack";
6379 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6380 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6384 goto cp0_unimplemented
;
6390 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6394 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6395 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6399 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6400 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6404 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6405 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6409 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6410 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6414 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6415 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6419 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6420 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6424 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6425 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6429 goto cp0_unimplemented
;
6435 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6440 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6441 rn
= "GlobalNumber";
6444 goto cp0_unimplemented
;
6450 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6454 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6455 rn
= "ContextConfig";
6456 goto cp0_unimplemented
;
6459 CP0_CHECK(ctx
->ulri
);
6460 tcg_gen_ld_tl(arg
, cpu_env
,
6461 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6465 goto cp0_unimplemented
;
6471 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6475 check_insn(ctx
, ISA_MIPS32R2
);
6476 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6480 goto cp0_unimplemented
;
6486 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6490 check_insn(ctx
, ISA_MIPS32R2
);
6491 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6495 check_insn(ctx
, ISA_MIPS32R2
);
6496 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6500 check_insn(ctx
, ISA_MIPS32R2
);
6501 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6505 check_insn(ctx
, ISA_MIPS32R2
);
6506 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6510 check_insn(ctx
, ISA_MIPS32R2
);
6511 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6515 goto cp0_unimplemented
;
6521 check_insn(ctx
, ISA_MIPS32R2
);
6522 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6526 goto cp0_unimplemented
;
6532 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6537 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6542 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6546 goto cp0_unimplemented
;
6552 /* Mark as an IO operation because we read the time. */
6553 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6556 gen_helper_mfc0_count(arg
, cpu_env
);
6557 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6560 /* Break the TB to be able to take timer interrupts immediately
6561 after reading count. */
6562 ctx
->bstate
= BS_STOP
;
6565 /* 6,7 are implementation dependent */
6567 goto cp0_unimplemented
;
6573 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6577 goto cp0_unimplemented
;
6583 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6586 /* 6,7 are implementation dependent */
6588 goto cp0_unimplemented
;
6594 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6598 check_insn(ctx
, ISA_MIPS32R2
);
6599 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6603 check_insn(ctx
, ISA_MIPS32R2
);
6604 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6608 check_insn(ctx
, ISA_MIPS32R2
);
6609 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6613 goto cp0_unimplemented
;
6619 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6623 goto cp0_unimplemented
;
6629 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6633 goto cp0_unimplemented
;
6639 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6643 check_insn(ctx
, ISA_MIPS32R2
);
6644 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6648 check_insn(ctx
, ISA_MIPS32R2
);
6649 CP0_CHECK(ctx
->cmgcr
);
6650 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6654 goto cp0_unimplemented
;
6660 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6664 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6668 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6672 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6676 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6680 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6683 /* 6,7 are implementation dependent */
6685 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6689 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6693 goto cp0_unimplemented
;
6699 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6703 CP0_CHECK(ctx
->mrp
);
6704 gen_helper_dmfc0_maar(arg
, cpu_env
);
6708 CP0_CHECK(ctx
->mrp
);
6709 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6713 goto cp0_unimplemented
;
6719 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6723 goto cp0_unimplemented
;
6729 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6733 goto cp0_unimplemented
;
6739 check_insn(ctx
, ISA_MIPS3
);
6740 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6744 goto cp0_unimplemented
;
6748 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6749 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6752 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6756 goto cp0_unimplemented
;
6760 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6761 rn
= "'Diagnostic"; /* implementation dependent */
6766 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6770 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6771 rn
= "TraceControl";
6774 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6775 rn
= "TraceControl2";
6778 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6779 rn
= "UserTraceData";
6782 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6786 goto cp0_unimplemented
;
6793 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6797 goto cp0_unimplemented
;
6803 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6804 rn
= "Performance0";
6807 // gen_helper_dmfc0_performance1(arg);
6808 rn
= "Performance1";
6811 // gen_helper_dmfc0_performance2(arg);
6812 rn
= "Performance2";
6815 // gen_helper_dmfc0_performance3(arg);
6816 rn
= "Performance3";
6819 // gen_helper_dmfc0_performance4(arg);
6820 rn
= "Performance4";
6823 // gen_helper_dmfc0_performance5(arg);
6824 rn
= "Performance5";
6827 // gen_helper_dmfc0_performance6(arg);
6828 rn
= "Performance6";
6831 // gen_helper_dmfc0_performance7(arg);
6832 rn
= "Performance7";
6835 goto cp0_unimplemented
;
6841 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6845 goto cp0_unimplemented
;
6852 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6856 goto cp0_unimplemented
;
6865 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6872 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6876 goto cp0_unimplemented
;
6885 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6892 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6896 goto cp0_unimplemented
;
6902 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6906 goto cp0_unimplemented
;
6913 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6917 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6918 tcg_gen_ld_tl(arg
, cpu_env
,
6919 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6923 goto cp0_unimplemented
;
6927 goto cp0_unimplemented
;
6929 (void)rn
; /* avoid a compiler warning */
6930 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6934 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6935 gen_mfc0_unimplemented(ctx
, arg
);
6938 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6940 const char *rn
= "invalid";
6943 check_insn(ctx
, ISA_MIPS64
);
6945 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6953 gen_helper_mtc0_index(cpu_env
, arg
);
6957 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6958 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6962 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6967 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6977 goto cp0_unimplemented
;
6987 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6988 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6992 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6993 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6997 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6998 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7002 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7003 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7007 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7008 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7012 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7013 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7014 rn
= "VPEScheFBack";
7017 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7018 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7022 goto cp0_unimplemented
;
7028 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7032 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7033 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7037 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7038 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7042 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7043 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7047 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7048 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7052 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7053 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7057 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7058 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7062 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7063 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7067 goto cp0_unimplemented
;
7073 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7079 rn
= "GlobalNumber";
7082 goto cp0_unimplemented
;
7088 gen_helper_mtc0_context(cpu_env
, arg
);
7092 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7093 rn
= "ContextConfig";
7094 goto cp0_unimplemented
;
7097 CP0_CHECK(ctx
->ulri
);
7098 tcg_gen_st_tl(arg
, cpu_env
,
7099 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7103 goto cp0_unimplemented
;
7109 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7113 check_insn(ctx
, ISA_MIPS32R2
);
7114 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7118 goto cp0_unimplemented
;
7124 gen_helper_mtc0_wired(cpu_env
, arg
);
7128 check_insn(ctx
, ISA_MIPS32R2
);
7129 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7133 check_insn(ctx
, ISA_MIPS32R2
);
7134 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7138 check_insn(ctx
, ISA_MIPS32R2
);
7139 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7143 check_insn(ctx
, ISA_MIPS32R2
);
7144 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7148 check_insn(ctx
, ISA_MIPS32R2
);
7149 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7153 goto cp0_unimplemented
;
7159 check_insn(ctx
, ISA_MIPS32R2
);
7160 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7161 ctx
->bstate
= BS_STOP
;
7165 goto cp0_unimplemented
;
7183 goto cp0_unimplemented
;
7189 gen_helper_mtc0_count(cpu_env
, arg
);
7192 /* 6,7 are implementation dependent */
7194 goto cp0_unimplemented
;
7196 /* Stop translation as we may have switched the execution mode */
7197 ctx
->bstate
= BS_STOP
;
7202 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7206 goto cp0_unimplemented
;
7212 gen_helper_mtc0_compare(cpu_env
, arg
);
7215 /* 6,7 are implementation dependent */
7217 goto cp0_unimplemented
;
7219 /* Stop translation as we may have switched the execution mode */
7220 ctx
->bstate
= BS_STOP
;
7225 save_cpu_state(ctx
, 1);
7226 gen_helper_mtc0_status(cpu_env
, arg
);
7227 /* BS_STOP isn't good enough here, hflags may have changed. */
7228 gen_save_pc(ctx
->pc
+ 4);
7229 ctx
->bstate
= BS_EXCP
;
7233 check_insn(ctx
, ISA_MIPS32R2
);
7234 gen_helper_mtc0_intctl(cpu_env
, arg
);
7235 /* Stop translation as we may have switched the execution mode */
7236 ctx
->bstate
= BS_STOP
;
7240 check_insn(ctx
, ISA_MIPS32R2
);
7241 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7242 /* Stop translation as we may have switched the execution mode */
7243 ctx
->bstate
= BS_STOP
;
7247 check_insn(ctx
, ISA_MIPS32R2
);
7248 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7249 /* Stop translation as we may have switched the execution mode */
7250 ctx
->bstate
= BS_STOP
;
7254 goto cp0_unimplemented
;
7260 save_cpu_state(ctx
, 1);
7261 /* Mark as an IO operation because we may trigger a software
7263 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7266 gen_helper_mtc0_cause(cpu_env
, arg
);
7267 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
            /* Stop translation as we may have triggered an interrupt */
7271 ctx
->bstate
= BS_STOP
;
7275 goto cp0_unimplemented
;
7281 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7285 goto cp0_unimplemented
;
7295 check_insn(ctx
, ISA_MIPS32R2
);
7296 gen_helper_mtc0_ebase(cpu_env
, arg
);
7300 goto cp0_unimplemented
;
7306 gen_helper_mtc0_config0(cpu_env
, arg
);
7308 /* Stop translation as we may have switched the execution mode */
7309 ctx
->bstate
= BS_STOP
;
7312 /* ignored, read only */
7316 gen_helper_mtc0_config2(cpu_env
, arg
);
7318 /* Stop translation as we may have switched the execution mode */
7319 ctx
->bstate
= BS_STOP
;
7322 gen_helper_mtc0_config3(cpu_env
, arg
);
7324 /* Stop translation as we may have switched the execution mode */
7325 ctx
->bstate
= BS_STOP
;
7328 /* currently ignored */
7332 gen_helper_mtc0_config5(cpu_env
, arg
);
7334 /* Stop translation as we may have switched the execution mode */
7335 ctx
->bstate
= BS_STOP
;
7337 /* 6,7 are implementation dependent */
7339 rn
= "Invalid config selector";
7340 goto cp0_unimplemented
;
7346 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7350 CP0_CHECK(ctx
->mrp
);
7351 gen_helper_mtc0_maar(cpu_env
, arg
);
7355 CP0_CHECK(ctx
->mrp
);
7356 gen_helper_mtc0_maari(cpu_env
, arg
);
7360 goto cp0_unimplemented
;
7366 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7370 goto cp0_unimplemented
;
7376 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7380 goto cp0_unimplemented
;
7386 check_insn(ctx
, ISA_MIPS3
);
7387 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7391 goto cp0_unimplemented
;
7395 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7396 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7399 gen_helper_mtc0_framemask(cpu_env
, arg
);
7403 goto cp0_unimplemented
;
7408 rn
= "Diagnostic"; /* implementation dependent */
7413 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7414 /* BS_STOP isn't good enough here, hflags may have changed. */
7415 gen_save_pc(ctx
->pc
+ 4);
7416 ctx
->bstate
= BS_EXCP
;
7420 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7421 /* Stop translation as we may have switched the execution mode */
7422 ctx
->bstate
= BS_STOP
;
7423 rn
= "TraceControl";
7426 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7427 /* Stop translation as we may have switched the execution mode */
7428 ctx
->bstate
= BS_STOP
;
7429 rn
= "TraceControl2";
7432 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7433 /* Stop translation as we may have switched the execution mode */
7434 ctx
->bstate
= BS_STOP
;
7435 rn
= "UserTraceData";
7438 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7439 /* Stop translation as we may have switched the execution mode */
7440 ctx
->bstate
= BS_STOP
;
7444 goto cp0_unimplemented
;
7451 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7455 goto cp0_unimplemented
;
7461 gen_helper_mtc0_performance0(cpu_env
, arg
);
7462 rn
= "Performance0";
7465 // gen_helper_mtc0_performance1(cpu_env, arg);
7466 rn
= "Performance1";
7469 // gen_helper_mtc0_performance2(cpu_env, arg);
7470 rn
= "Performance2";
7473 // gen_helper_mtc0_performance3(cpu_env, arg);
7474 rn
= "Performance3";
7477 // gen_helper_mtc0_performance4(cpu_env, arg);
7478 rn
= "Performance4";
7481 // gen_helper_mtc0_performance5(cpu_env, arg);
7482 rn
= "Performance5";
7485 // gen_helper_mtc0_performance6(cpu_env, arg);
7486 rn
= "Performance6";
7489 // gen_helper_mtc0_performance7(cpu_env, arg);
7490 rn
= "Performance7";
7493 goto cp0_unimplemented
;
7499 gen_helper_mtc0_errctl(cpu_env
, arg
);
7500 ctx
->bstate
= BS_STOP
;
7504 goto cp0_unimplemented
;
7514 goto cp0_unimplemented
;
7523 gen_helper_mtc0_taglo(cpu_env
, arg
);
7530 gen_helper_mtc0_datalo(cpu_env
, arg
);
7534 goto cp0_unimplemented
;
7543 gen_helper_mtc0_taghi(cpu_env
, arg
);
7550 gen_helper_mtc0_datahi(cpu_env
, arg
);
7555 goto cp0_unimplemented
;
7561 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7565 goto cp0_unimplemented
;
7572 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7576 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7577 tcg_gen_st_tl(arg
, cpu_env
,
7578 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7582 goto cp0_unimplemented
;
7584 /* Stop translation as we may have switched the execution mode */
7585 ctx
->bstate
= BS_STOP
;
7588 goto cp0_unimplemented
;
7590 (void)rn
; /* avoid a compiler warning */
7591 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7592 /* For simplicity assume that all writes can cause interrupts. */
7593 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7595 ctx
->bstate
= BS_STOP
;
7600 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7602 #endif /* TARGET_MIPS64 */
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
            gen_helper_mftc0_vpecontrol(t0, cpu_env);
            gen_helper_mftc0_vpeconf0(t0, cpu_env);
            gen_helper_mftc0_tcstatus(t0, cpu_env);
            gen_helper_mftc0_tcbind(t0, cpu_env);
            gen_helper_mftc0_tcrestart(t0, cpu_env);
            gen_helper_mftc0_tchalt(t0, cpu_env);
            gen_helper_mftc0_tccontext(t0, cpu_env);
            gen_helper_mftc0_tcschedule(t0, cpu_env);
            gen_helper_mftc0_tcschefback(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_entryhi(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_status(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
            gen_helper_mftc0_cause(t0, cpu_env);
            gen_helper_mftc0_epc(t0, cpu_env);
            gen_helper_mftc0_ebase(t0, cpu_env);
            gen_helper_mftc0_configx(t0, cpu_env, tcg_const_tl(sel));
            gen_helper_mftc0_debug(t0, cpu_env);
            gen_mfc0(ctx, t0, rt, sel);
        gen_mfc0(ctx, t0, rt, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_1e0i(mftgpr, t0, rt);
    /* Auxiliary CPU registers */
            gen_helper_1e0i(mftlo, t0, 0);
            gen_helper_1e0i(mfthi, t0, 0);
            gen_helper_1e0i(mftacx, t0, 0);
            gen_helper_1e0i(mftlo, t0, 1);
            gen_helper_1e0i(mfthi, t0, 1);
            gen_helper_1e0i(mftacx, t0, 1);
            gen_helper_1e0i(mftlo, t0, 2);
            gen_helper_1e0i(mfthi, t0, 2);
            gen_helper_1e0i(mftacx, t0, 2);
            gen_helper_1e0i(mftlo, t0, 3);
            gen_helper_1e0i(mfthi, t0, 3);
            gen_helper_1e0i(mftacx, t0, 3);
            gen_helper_mftdsp(t0, cpu_env);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32h(ctx, fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
        gen_helper_1e0i(cfc1, t0, rt);
    /* COP2: Not implemented. */
    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    gen_store_gpr(t0, rd);
    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
            gen_helper_mttc0_vpecontrol(cpu_env, t0);
            gen_helper_mttc0_vpeconf0(cpu_env, t0);
            gen_helper_mttc0_tcstatus(cpu_env, t0);
            gen_helper_mttc0_tcbind(cpu_env, t0);
            gen_helper_mttc0_tcrestart(cpu_env, t0);
            gen_helper_mttc0_tchalt(cpu_env, t0);
            gen_helper_mttc0_tccontext(cpu_env, t0);
            gen_helper_mttc0_tcschedule(cpu_env, t0);
            gen_helper_mttc0_tcschefback(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_entryhi(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_status(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_cause(cpu_env, t0);
            gen_helper_mttc0_ebase(cpu_env, t0);
            gen_helper_mttc0_debug(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
        gen_mtc0(ctx, t0, rd, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_0e1i(mttgpr, t0, rd);
    /* Auxiliary CPU registers */
            gen_helper_0e1i(mttlo, t0, 0);
            gen_helper_0e1i(mtthi, t0, 0);
            gen_helper_0e1i(mttacx, t0, 0);
            gen_helper_0e1i(mttlo, t0, 1);
            gen_helper_0e1i(mtthi, t0, 1);
            gen_helper_0e1i(mttacx, t0, 1);
            gen_helper_0e1i(mttlo, t0, 2);
            gen_helper_0e1i(mtthi, t0, 2);
            gen_helper_0e1i(mttacx, t0, 2);
            gen_helper_0e1i(mttlo, t0, 3);
            gen_helper_0e1i(mtthi, t0, 3);
            gen_helper_0e1i(mttacx, t0, 3);
            gen_helper_mttdsp(cpu_env, t0);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fs_tmp = tcg_const_i32(rd);
            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
    /* COP2: Not implemented. */
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
    const char *opn = "ldst";

    check_cp0_enabled(ctx);
        gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
            TCGv t0 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        check_insn(ctx, ISA_MIPS3);
            TCGv t0 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
        gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
            TCGv t0 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
        check_insn(ctx, ASE_MT);
        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        check_insn(ctx, ASE_MT);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        if (!env->tlb->helper_tlbwi)
        gen_helper_tlbwi(cpu_env);
        if (!env->tlb->helper_tlbinv) {
            gen_helper_tlbinv(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbinvf) {
            gen_helper_tlbinvf(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbwr)
        gen_helper_tlbwr(cpu_env);
        if (!env->tlb->helper_tlbp)
        gen_helper_tlbp(cpu_env);
        if (!env->tlb->helper_tlbr)
        gen_helper_tlbr(cpu_env);
    case OPC_ERET: /* OPC_ERETNC */
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
            if (ctx->opcode & (1 << bit_shift)) {
                check_insn(ctx, ISA_MIPS32R5);
                gen_helper_eretnc(cpu_env);
                check_insn(ctx, ISA_MIPS2);
                gen_helper_eret(cpu_env);
            ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            if (!(ctx->hflags & MIPS_HFLAG_DM)) {
                generate_exception_end(ctx, EXCP_RI);
                gen_helper_deret(cpu_env);
                ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        /* If we get an exception, we want to restart at next instruction */
        save_cpu_state(ctx, 1);
        gen_helper_wait(cpu_env);
        ctx->bstate = BS_EXCP;
        generate_exception_end(ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
#endif /* !CONFIG_USER_ONLY */
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();

    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
    btarget = ctx->pc + 4 + offset;
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BL;
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i32(t0);
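
/*
 * Illustrative sketch: a hypothetical helper (not part of the original
 * source) spelling out the FCSR bit that the shifts above extract.  It is
 * assumed that get_fp_bit() maps condition code 0 to FCSR bit 23 and
 * condition codes 1..7 to bits 25..31.
 */
static inline int fcsr_cc_bit_sketch(int cc)
{
    return cc ? 24 + cc : 23;
}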
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
        generate_exception_end(ctx, EXCP_RI);
    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);
    btarget = addr_add(ctx, ctx->pc + 4, offset);
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    tcg_gen_trunc_i64_tl(bcond, t0);
    ctx->btarget = btarget;
    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i64(t0);
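
/*
 * Illustrative sketch: for the R6 CP1 branches handled above (presumably
 * the BC1EQZ/BC1NEZ pair), the condition reduces to bit 0 of the source
 * FPR: the xori path branches when that bit is clear, the other path when
 * it is set.  This helper is hypothetical and not used elsewhere.
 */
static inline int bc1eqz_taken_sketch(uint64_t fpr)
{
    return (fpr & 1) == 0;
}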
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))

enum fopcode {
    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_SEL_S = FOP(16, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_SELEQZ_S = FOP(20, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_SELNEZ_S = FOP(23, FMT_S),
    OPC_MADDF_S = FOP(24, FMT_S),
    OPC_MSUBF_S = FOP(25, FMT_S),
    OPC_RINT_S = FOP(26, FMT_S),
    OPC_CLASS_S = FOP(27, FMT_S),
    OPC_MIN_S = FOP(28, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_MINA_S = FOP(29, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_MAX_S = FOP(30, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_MAXA_S = FOP(31, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP (48, FMT_S),
    OPC_CMP_UN_S = FOP (49, FMT_S),
    OPC_CMP_EQ_S = FOP (50, FMT_S),
    OPC_CMP_UEQ_S = FOP (51, FMT_S),
    OPC_CMP_OLT_S = FOP (52, FMT_S),
    OPC_CMP_ULT_S = FOP (53, FMT_S),
    OPC_CMP_OLE_S = FOP (54, FMT_S),
    OPC_CMP_ULE_S = FOP (55, FMT_S),
    OPC_CMP_SF_S = FOP (56, FMT_S),
    OPC_CMP_NGLE_S = FOP (57, FMT_S),
    OPC_CMP_SEQ_S = FOP (58, FMT_S),
    OPC_CMP_NGL_S = FOP (59, FMT_S),
    OPC_CMP_LT_S = FOP (60, FMT_S),
    OPC_CMP_NGE_S = FOP (61, FMT_S),
    OPC_CMP_LE_S = FOP (62, FMT_S),
    OPC_CMP_NGT_S = FOP (63, FMT_S),

    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_SEL_D = FOP(16, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_SELEQZ_D = FOP(20, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_SELNEZ_D = FOP(23, FMT_D),
    OPC_MADDF_D = FOP(24, FMT_D),
    OPC_MSUBF_D = FOP(25, FMT_D),
    OPC_RINT_D = FOP(26, FMT_D),
    OPC_CLASS_D = FOP(27, FMT_D),
    OPC_MIN_D = FOP(28, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_MINA_D = FOP(29, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_MAX_D = FOP(30, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_MAXA_D = FOP(31, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP (48, FMT_D),
    OPC_CMP_UN_D = FOP (49, FMT_D),
    OPC_CMP_EQ_D = FOP (50, FMT_D),
    OPC_CMP_UEQ_D = FOP (51, FMT_D),
    OPC_CMP_OLT_D = FOP (52, FMT_D),
    OPC_CMP_ULT_D = FOP (53, FMT_D),
    OPC_CMP_OLE_D = FOP (54, FMT_D),
    OPC_CMP_ULE_D = FOP (55, FMT_D),
    OPC_CMP_SF_D = FOP (56, FMT_D),
    OPC_CMP_NGLE_D = FOP (57, FMT_D),
    OPC_CMP_SEQ_D = FOP (58, FMT_D),
    OPC_CMP_NGL_D = FOP (59, FMT_D),
    OPC_CMP_LT_D = FOP (60, FMT_D),
    OPC_CMP_NGE_D = FOP (61, FMT_D),
    OPC_CMP_LE_D = FOP (62, FMT_D),
    OPC_CMP_NGT_D = FOP (63, FMT_D),

    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),

    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP (48, FMT_PS),
    OPC_CMP_UN_PS = FOP (49, FMT_PS),
    OPC_CMP_EQ_PS = FOP (50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP (51, FMT_PS),
    OPC_CMP_OLT_PS = FOP (52, FMT_PS),
    OPC_CMP_ULT_PS = FOP (53, FMT_PS),
    OPC_CMP_OLE_PS = FOP (54, FMT_PS),
    OPC_CMP_ULE_PS = FOP (55, FMT_PS),
    OPC_CMP_SF_PS = FOP (56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP (57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP (58, FMT_PS),
    OPC_CMP_NGL_PS = FOP (59, FMT_PS),
    OPC_CMP_LT_PS = FOP (60, FMT_PS),
    OPC_CMP_NGE_PS = FOP (61, FMT_PS),
    OPC_CMP_LE_PS = FOP (62, FMT_PS),
    OPC_CMP_NGT_PS = FOP (63, FMT_PS),

    R6_OPC_CMP_AF_S = FOP(0, FMT_W),
    R6_OPC_CMP_UN_S = FOP(1, FMT_W),
    R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
    R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
    R6_OPC_CMP_LT_S = FOP(4, FMT_W),
    R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
    R6_OPC_CMP_LE_S = FOP(6, FMT_W),
    R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
    R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
    R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
    R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
    R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
    R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
    R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
    R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
    R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
    R6_OPC_CMP_OR_S = FOP(17, FMT_W),
    R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
    R6_OPC_CMP_NE_S = FOP(19, FMT_W),
    R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
    R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
    R6_OPC_CMP_SNE_S = FOP(27, FMT_W),

    R6_OPC_CMP_AF_D = FOP(0, FMT_L),
    R6_OPC_CMP_UN_D = FOP(1, FMT_L),
    R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
    R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
    R6_OPC_CMP_LT_D = FOP(4, FMT_L),
    R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
    R6_OPC_CMP_LE_D = FOP(6, FMT_L),
    R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
    R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
    R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
    R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
    R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
    R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
    R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
    R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
    R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
    R6_OPC_CMP_OR_D = FOP(17, FMT_L),
    R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
    R6_OPC_CMP_NE_D = FOP(19, FMT_L),
    R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
    R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
    R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
};
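
/*
 * Illustrative sketch: a hypothetical helper (not part of the original
 * source) showing how the enumerators above relate to a raw instruction
 * word.  FOP() packs the COP1 fmt field (bits 25..21) and function field
 * (bits 5..0), so an opcode can be compared against these values after
 * masking with FOP(0x3f, 0x1f).
 */
static inline uint32_t mask_cp1_fop_sketch(uint32_t opcode)
{
    return opcode & FOP(0x3f, 0x1f);
}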
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    TCGv t0 = tcg_temp_new();
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);
            TCGv_i32 fs_tmp = tcg_const_i32(fs);
            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
#if defined(TARGET_MIPS64)
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32h(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);

static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
                               int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_temp_free_i32(t0);

static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
                                int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);
        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);

static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);
        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
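
/*
 * Illustrative sketch: hypothetical helpers (not part of the original
 * source) spelling out, on raw register values, what the movcond-based
 * select code above computes.  Bit 0 of the condition operand decides the
 * result; the function and parameter names are invented for illustration.
 */
static inline uint32_t sel_s_result_sketch(uint32_t fd, uint32_t fs,
                                           uint32_t ft)
{
    return (fd & 1) ? ft : fs;          /* SEL.S: pick ft when fd bit 0 set */
}

static inline uint32_t seleqz_s_result_sketch(uint32_t fs, uint32_t ft)
{
    return (ft & 1) == 0 ? fs : 0;      /* SELEQZ.S: fs when ft bit 0 clear */
}

static inline uint32_t selnez_s_result_sketch(uint32_t fs, uint32_t ft)
{
    return (ft & 1) != 0 ? fs : 0;      /* SELNEZ.S: fs when ft bit 0 set */
}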
8834 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8835 int ft
, int fs
, int fd
, int cc
)
8837 uint32_t func
= ctx
->opcode
& 0x3f;
8841 TCGv_i32 fp0
= tcg_temp_new_i32();
8842 TCGv_i32 fp1
= tcg_temp_new_i32();
8844 gen_load_fpr32(ctx
, fp0
, fs
);
8845 gen_load_fpr32(ctx
, fp1
, ft
);
8846 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8847 tcg_temp_free_i32(fp1
);
8848 gen_store_fpr32(ctx
, fp0
, fd
);
8849 tcg_temp_free_i32(fp0
);
8854 TCGv_i32 fp0
= tcg_temp_new_i32();
8855 TCGv_i32 fp1
= tcg_temp_new_i32();
8857 gen_load_fpr32(ctx
, fp0
, fs
);
8858 gen_load_fpr32(ctx
, fp1
, ft
);
8859 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8860 tcg_temp_free_i32(fp1
);
8861 gen_store_fpr32(ctx
, fp0
, fd
);
8862 tcg_temp_free_i32(fp0
);
8867 TCGv_i32 fp0
= tcg_temp_new_i32();
8868 TCGv_i32 fp1
= tcg_temp_new_i32();
8870 gen_load_fpr32(ctx
, fp0
, fs
);
8871 gen_load_fpr32(ctx
, fp1
, ft
);
8872 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8873 tcg_temp_free_i32(fp1
);
8874 gen_store_fpr32(ctx
, fp0
, fd
);
8875 tcg_temp_free_i32(fp0
);
8880 TCGv_i32 fp0
= tcg_temp_new_i32();
8881 TCGv_i32 fp1
= tcg_temp_new_i32();
8883 gen_load_fpr32(ctx
, fp0
, fs
);
8884 gen_load_fpr32(ctx
, fp1
, ft
);
8885 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8886 tcg_temp_free_i32(fp1
);
8887 gen_store_fpr32(ctx
, fp0
, fd
);
8888 tcg_temp_free_i32(fp0
);
8893 TCGv_i32 fp0
= tcg_temp_new_i32();
8895 gen_load_fpr32(ctx
, fp0
, fs
);
8896 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8897 gen_store_fpr32(ctx
, fp0
, fd
);
8898 tcg_temp_free_i32(fp0
);
8903 TCGv_i32 fp0
= tcg_temp_new_i32();
8905 gen_load_fpr32(ctx
, fp0
, fs
);
8907 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
8909 gen_helper_float_abs_s(fp0
, fp0
);
8911 gen_store_fpr32(ctx
, fp0
, fd
);
8912 tcg_temp_free_i32(fp0
);
8917 TCGv_i32 fp0
= tcg_temp_new_i32();
8919 gen_load_fpr32(ctx
, fp0
, fs
);
8920 gen_store_fpr32(ctx
, fp0
, fd
);
8921 tcg_temp_free_i32(fp0
);
8926 TCGv_i32 fp0
= tcg_temp_new_i32();
8928 gen_load_fpr32(ctx
, fp0
, fs
);
8930 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
8932 gen_helper_float_chs_s(fp0
, fp0
);
8934 gen_store_fpr32(ctx
, fp0
, fd
);
8935 tcg_temp_free_i32(fp0
);
8939 check_cp1_64bitmode(ctx
);
8941 TCGv_i32 fp32
= tcg_temp_new_i32();
8942 TCGv_i64 fp64
= tcg_temp_new_i64();
8944 gen_load_fpr32(ctx
, fp32
, fs
);
8946 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
8948 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
8950 tcg_temp_free_i32(fp32
);
8951 gen_store_fpr64(ctx
, fp64
, fd
);
8952 tcg_temp_free_i64(fp64
);
8956 check_cp1_64bitmode(ctx
);
8958 TCGv_i32 fp32
= tcg_temp_new_i32();
8959 TCGv_i64 fp64
= tcg_temp_new_i64();
8961 gen_load_fpr32(ctx
, fp32
, fs
);
8963 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
8965 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
8967 tcg_temp_free_i32(fp32
);
8968 gen_store_fpr64(ctx
, fp64
, fd
);
8969 tcg_temp_free_i64(fp64
);
8973 check_cp1_64bitmode(ctx
);
8975 TCGv_i32 fp32
= tcg_temp_new_i32();
8976 TCGv_i64 fp64
= tcg_temp_new_i64();
8978 gen_load_fpr32(ctx
, fp32
, fs
);
8980 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
8982 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
8984 tcg_temp_free_i32(fp32
);
8985 gen_store_fpr64(ctx
, fp64
, fd
);
8986 tcg_temp_free_i64(fp64
);
8990 check_cp1_64bitmode(ctx
);
8992 TCGv_i32 fp32
= tcg_temp_new_i32();
8993 TCGv_i64 fp64
= tcg_temp_new_i64();
8995 gen_load_fpr32(ctx
, fp32
, fs
);
8997 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
8999 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
9001 tcg_temp_free_i32(fp32
);
9002 gen_store_fpr64(ctx
, fp64
, fd
);
9003 tcg_temp_free_i64(fp64
);
9008 TCGv_i32 fp0
= tcg_temp_new_i32();
9010 gen_load_fpr32(ctx
, fp0
, fs
);
9012 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
9014 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
9016 gen_store_fpr32(ctx
, fp0
, fd
);
9017 tcg_temp_free_i32(fp0
);
9022 TCGv_i32 fp0
= tcg_temp_new_i32();
9024 gen_load_fpr32(ctx
, fp0
, fs
);
9026 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
9028 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
9030 gen_store_fpr32(ctx
, fp0
, fd
);
9031 tcg_temp_free_i32(fp0
);
9036 TCGv_i32 fp0
= tcg_temp_new_i32();
9038 gen_load_fpr32(ctx
, fp0
, fs
);
9040 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
9042 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
9044 gen_store_fpr32(ctx
, fp0
, fd
);
9045 tcg_temp_free_i32(fp0
);
9050 TCGv_i32 fp0
= tcg_temp_new_i32();
9052 gen_load_fpr32(ctx
, fp0
, fs
);
9054 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
9056 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
9058 gen_store_fpr32(ctx
, fp0
, fd
);
9059 tcg_temp_free_i32(fp0
);
9063 check_insn(ctx
, ISA_MIPS32R6
);
9064 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9067 check_insn(ctx
, ISA_MIPS32R6
);
9068 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9071 check_insn(ctx
, ISA_MIPS32R6
);
9072 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9075 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9076 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9079 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9081 TCGLabel
*l1
= gen_new_label();
9085 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9087 fp0
= tcg_temp_new_i32();
9088 gen_load_fpr32(ctx
, fp0
, fs
);
9089 gen_store_fpr32(ctx
, fp0
, fd
);
9090 tcg_temp_free_i32(fp0
);
9095 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9097 TCGLabel
*l1
= gen_new_label();
9101 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9102 fp0
= tcg_temp_new_i32();
9103 gen_load_fpr32(ctx
, fp0
, fs
);
9104 gen_store_fpr32(ctx
, fp0
, fd
);
9105 tcg_temp_free_i32(fp0
);
9112 TCGv_i32 fp0
= tcg_temp_new_i32();
9114 gen_load_fpr32(ctx
, fp0
, fs
);
9115 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9116 gen_store_fpr32(ctx
, fp0
, fd
);
9117 tcg_temp_free_i32(fp0
);
9122 TCGv_i32 fp0
= tcg_temp_new_i32();
9124 gen_load_fpr32(ctx
, fp0
, fs
);
9125 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9126 gen_store_fpr32(ctx
, fp0
, fd
);
9127 tcg_temp_free_i32(fp0
);
9131 check_insn(ctx
, ISA_MIPS32R6
);
9133 TCGv_i32 fp0
= tcg_temp_new_i32();
9134 TCGv_i32 fp1
= tcg_temp_new_i32();
9135 TCGv_i32 fp2
= tcg_temp_new_i32();
9136 gen_load_fpr32(ctx
, fp0
, fs
);
9137 gen_load_fpr32(ctx
, fp1
, ft
);
9138 gen_load_fpr32(ctx
, fp2
, fd
);
9139 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9140 gen_store_fpr32(ctx
, fp2
, fd
);
9141 tcg_temp_free_i32(fp2
);
9142 tcg_temp_free_i32(fp1
);
9143 tcg_temp_free_i32(fp0
);
9147 check_insn(ctx
, ISA_MIPS32R6
);
9149 TCGv_i32 fp0
= tcg_temp_new_i32();
9150 TCGv_i32 fp1
= tcg_temp_new_i32();
9151 TCGv_i32 fp2
= tcg_temp_new_i32();
9152 gen_load_fpr32(ctx
, fp0
, fs
);
9153 gen_load_fpr32(ctx
, fp1
, ft
);
9154 gen_load_fpr32(ctx
, fp2
, fd
);
9155 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9156 gen_store_fpr32(ctx
, fp2
, fd
);
9157 tcg_temp_free_i32(fp2
);
9158 tcg_temp_free_i32(fp1
);
9159 tcg_temp_free_i32(fp0
);
9163 check_insn(ctx
, ISA_MIPS32R6
);
9165 TCGv_i32 fp0
= tcg_temp_new_i32();
9166 gen_load_fpr32(ctx
, fp0
, fs
);
9167 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9168 gen_store_fpr32(ctx
, fp0
, fd
);
9169 tcg_temp_free_i32(fp0
);
9173 check_insn(ctx
, ISA_MIPS32R6
);
9175 TCGv_i32 fp0
= tcg_temp_new_i32();
9176 gen_load_fpr32(ctx
, fp0
, fs
);
9177 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
9178 gen_store_fpr32(ctx
, fp0
, fd
);
9179 tcg_temp_free_i32(fp0
);
9182 case OPC_MIN_S
: /* OPC_RECIP2_S */
9183 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9185 TCGv_i32 fp0
= tcg_temp_new_i32();
9186 TCGv_i32 fp1
= tcg_temp_new_i32();
9187 TCGv_i32 fp2
= tcg_temp_new_i32();
9188 gen_load_fpr32(ctx
, fp0
, fs
);
9189 gen_load_fpr32(ctx
, fp1
, ft
);
9190 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9191 gen_store_fpr32(ctx
, fp2
, fd
);
9192 tcg_temp_free_i32(fp2
);
9193 tcg_temp_free_i32(fp1
);
9194 tcg_temp_free_i32(fp0
);
9197 check_cp1_64bitmode(ctx
);
9199 TCGv_i32 fp0
= tcg_temp_new_i32();
9200 TCGv_i32 fp1
= tcg_temp_new_i32();
9202 gen_load_fpr32(ctx
, fp0
, fs
);
9203 gen_load_fpr32(ctx
, fp1
, ft
);
9204 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9205 tcg_temp_free_i32(fp1
);
9206 gen_store_fpr32(ctx
, fp0
, fd
);
9207 tcg_temp_free_i32(fp0
);
9211 case OPC_MINA_S
: /* OPC_RECIP1_S */
9212 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9214 TCGv_i32 fp0
= tcg_temp_new_i32();
9215 TCGv_i32 fp1
= tcg_temp_new_i32();
9216 TCGv_i32 fp2
= tcg_temp_new_i32();
9217 gen_load_fpr32(ctx
, fp0
, fs
);
9218 gen_load_fpr32(ctx
, fp1
, ft
);
9219 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9220 gen_store_fpr32(ctx
, fp2
, fd
);
9221 tcg_temp_free_i32(fp2
);
9222 tcg_temp_free_i32(fp1
);
9223 tcg_temp_free_i32(fp0
);
9226 check_cp1_64bitmode(ctx
);
9228 TCGv_i32 fp0
= tcg_temp_new_i32();
9230 gen_load_fpr32(ctx
, fp0
, fs
);
9231 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9232 gen_store_fpr32(ctx
, fp0
, fd
);
9233 tcg_temp_free_i32(fp0
);
9237 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9238 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9240 TCGv_i32 fp0
= tcg_temp_new_i32();
9241 TCGv_i32 fp1
= tcg_temp_new_i32();
9242 gen_load_fpr32(ctx
, fp0
, fs
);
9243 gen_load_fpr32(ctx
, fp1
, ft
);
9244 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9245 gen_store_fpr32(ctx
, fp1
, fd
);
9246 tcg_temp_free_i32(fp1
);
9247 tcg_temp_free_i32(fp0
);
9250 check_cp1_64bitmode(ctx
);
9252 TCGv_i32 fp0
= tcg_temp_new_i32();
9254 gen_load_fpr32(ctx
, fp0
, fs
);
9255 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9256 gen_store_fpr32(ctx
, fp0
, fd
);
9257 tcg_temp_free_i32(fp0
);
9261 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9262 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9264 TCGv_i32 fp0
= tcg_temp_new_i32();
9265 TCGv_i32 fp1
= tcg_temp_new_i32();
9266 gen_load_fpr32(ctx
, fp0
, fs
);
9267 gen_load_fpr32(ctx
, fp1
, ft
);
9268 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9269 gen_store_fpr32(ctx
, fp1
, fd
);
9270 tcg_temp_free_i32(fp1
);
9271 tcg_temp_free_i32(fp0
);
9274 check_cp1_64bitmode(ctx
);
9276 TCGv_i32 fp0
= tcg_temp_new_i32();
9277 TCGv_i32 fp1
= tcg_temp_new_i32();
9279 gen_load_fpr32(ctx
, fp0
, fs
);
9280 gen_load_fpr32(ctx
, fp1
, ft
);
9281 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9282 tcg_temp_free_i32(fp1
);
9283 gen_store_fpr32(ctx
, fp0
, fd
);
9284 tcg_temp_free_i32(fp0
);
9289 check_cp1_registers(ctx
, fd
);
9291 TCGv_i32 fp32
= tcg_temp_new_i32();
9292 TCGv_i64 fp64
= tcg_temp_new_i64();
9294 gen_load_fpr32(ctx
, fp32
, fs
);
9295 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9296 tcg_temp_free_i32(fp32
);
9297 gen_store_fpr64(ctx
, fp64
, fd
);
9298 tcg_temp_free_i64(fp64
);
9303 TCGv_i32 fp0
= tcg_temp_new_i32();
9305 gen_load_fpr32(ctx
, fp0
, fs
);
9307 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
9309 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
9311 gen_store_fpr32(ctx
, fp0
, fd
);
9312 tcg_temp_free_i32(fp0
);
9316 check_cp1_64bitmode(ctx
);
9318 TCGv_i32 fp32
= tcg_temp_new_i32();
9319 TCGv_i64 fp64
= tcg_temp_new_i64();
9321 gen_load_fpr32(ctx
, fp32
, fs
);
9323 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
9325 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
9327 tcg_temp_free_i32(fp32
);
9328 gen_store_fpr64(ctx
, fp64
, fd
);
9329 tcg_temp_free_i64(fp64
);
9335 TCGv_i64 fp64
= tcg_temp_new_i64();
9336 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9337 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9339 gen_load_fpr32(ctx
, fp32_0
, fs
);
9340 gen_load_fpr32(ctx
, fp32_1
, ft
);
9341 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9342 tcg_temp_free_i32(fp32_1
);
9343 tcg_temp_free_i32(fp32_0
);
9344 gen_store_fpr64(ctx
, fp64
, fd
);
9345 tcg_temp_free_i64(fp64
);
9357 case OPC_CMP_NGLE_S
:
9364 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9365 if (ctx
->opcode
& (1 << 6)) {
9366 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9368 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9372 check_cp1_registers(ctx
, fs
| ft
| fd
);
9374 TCGv_i64 fp0
= tcg_temp_new_i64();
9375 TCGv_i64 fp1
= tcg_temp_new_i64();
9377 gen_load_fpr64(ctx
, fp0
, fs
);
9378 gen_load_fpr64(ctx
, fp1
, ft
);
9379 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9380 tcg_temp_free_i64(fp1
);
9381 gen_store_fpr64(ctx
, fp0
, fd
);
9382 tcg_temp_free_i64(fp0
);
9386 check_cp1_registers(ctx
, fs
| ft
| fd
);
9388 TCGv_i64 fp0
= tcg_temp_new_i64();
9389 TCGv_i64 fp1
= tcg_temp_new_i64();
9391 gen_load_fpr64(ctx
, fp0
, fs
);
9392 gen_load_fpr64(ctx
, fp1
, ft
);
9393 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9394 tcg_temp_free_i64(fp1
);
9395 gen_store_fpr64(ctx
, fp0
, fd
);
9396 tcg_temp_free_i64(fp0
);
9400 check_cp1_registers(ctx
, fs
| ft
| fd
);
9402 TCGv_i64 fp0
= tcg_temp_new_i64();
9403 TCGv_i64 fp1
= tcg_temp_new_i64();
9405 gen_load_fpr64(ctx
, fp0
, fs
);
9406 gen_load_fpr64(ctx
, fp1
, ft
);
9407 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9408 tcg_temp_free_i64(fp1
);
9409 gen_store_fpr64(ctx
, fp0
, fd
);
9410 tcg_temp_free_i64(fp0
);
9414 check_cp1_registers(ctx
, fs
| ft
| fd
);
9416 TCGv_i64 fp0
= tcg_temp_new_i64();
9417 TCGv_i64 fp1
= tcg_temp_new_i64();
9419 gen_load_fpr64(ctx
, fp0
, fs
);
9420 gen_load_fpr64(ctx
, fp1
, ft
);
9421 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9422 tcg_temp_free_i64(fp1
);
9423 gen_store_fpr64(ctx
, fp0
, fd
);
9424 tcg_temp_free_i64(fp0
);
9428 check_cp1_registers(ctx
, fs
| fd
);
9430 TCGv_i64 fp0
= tcg_temp_new_i64();
9432 gen_load_fpr64(ctx
, fp0
, fs
);
9433 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9434 gen_store_fpr64(ctx
, fp0
, fd
);
9435 tcg_temp_free_i64(fp0
);
9439 check_cp1_registers(ctx
, fs
| fd
);
9441 TCGv_i64 fp0
= tcg_temp_new_i64();
9443 gen_load_fpr64(ctx
, fp0
, fs
);
9445 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
9447 gen_helper_float_abs_d(fp0
, fp0
);
9449 gen_store_fpr64(ctx
, fp0
, fd
);
9450 tcg_temp_free_i64(fp0
);
9454 check_cp1_registers(ctx
, fs
| fd
);
9456 TCGv_i64 fp0
= tcg_temp_new_i64();
9458 gen_load_fpr64(ctx
, fp0
, fs
);
9459 gen_store_fpr64(ctx
, fp0
, fd
);
9460 tcg_temp_free_i64(fp0
);
9464 check_cp1_registers(ctx
, fs
| fd
);
9466 TCGv_i64 fp0
= tcg_temp_new_i64();
9468 gen_load_fpr64(ctx
, fp0
, fs
);
9470 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
9472 gen_helper_float_chs_d(fp0
, fp0
);
9474 gen_store_fpr64(ctx
, fp0
, fd
);
9475 tcg_temp_free_i64(fp0
);
9479 check_cp1_64bitmode(ctx
);
9481 TCGv_i64 fp0
= tcg_temp_new_i64();
9483 gen_load_fpr64(ctx
, fp0
, fs
);
9485 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
9487 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
9489 gen_store_fpr64(ctx
, fp0
, fd
);
9490 tcg_temp_free_i64(fp0
);
9494 check_cp1_64bitmode(ctx
);
9496 TCGv_i64 fp0
= tcg_temp_new_i64();
9498 gen_load_fpr64(ctx
, fp0
, fs
);
9500 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
9502 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
9504 gen_store_fpr64(ctx
, fp0
, fd
);
9505 tcg_temp_free_i64(fp0
);
9509 check_cp1_64bitmode(ctx
);
9511 TCGv_i64 fp0
= tcg_temp_new_i64();
9513 gen_load_fpr64(ctx
, fp0
, fs
);
9515 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
9517 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
9519 gen_store_fpr64(ctx
, fp0
, fd
);
9520 tcg_temp_free_i64(fp0
);
9524 check_cp1_64bitmode(ctx
);
9526 TCGv_i64 fp0
= tcg_temp_new_i64();
9528 gen_load_fpr64(ctx
, fp0
, fs
);
9530 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
9532 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
9534 gen_store_fpr64(ctx
, fp0
, fd
);
9535 tcg_temp_free_i64(fp0
);
9539 check_cp1_registers(ctx
, fs
);
9541 TCGv_i32 fp32
= tcg_temp_new_i32();
9542 TCGv_i64 fp64
= tcg_temp_new_i64();
9544 gen_load_fpr64(ctx
, fp64
, fs
);
9546 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
9548 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
9550 tcg_temp_free_i64(fp64
);
9551 gen_store_fpr32(ctx
, fp32
, fd
);
9552 tcg_temp_free_i32(fp32
);
9556 check_cp1_registers(ctx
, fs
);
9558 TCGv_i32 fp32
= tcg_temp_new_i32();
9559 TCGv_i64 fp64
= tcg_temp_new_i64();
9561 gen_load_fpr64(ctx
, fp64
, fs
);
9563 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
9565 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
9567 tcg_temp_free_i64(fp64
);
9568 gen_store_fpr32(ctx
, fp32
, fd
);
9569 tcg_temp_free_i32(fp32
);
9573 check_cp1_registers(ctx
, fs
);
9575 TCGv_i32 fp32
= tcg_temp_new_i32();
9576 TCGv_i64 fp64
= tcg_temp_new_i64();
9578 gen_load_fpr64(ctx
, fp64
, fs
);
9580 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
9582 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
9584 tcg_temp_free_i64(fp64
);
9585 gen_store_fpr32(ctx
, fp32
, fd
);
9586 tcg_temp_free_i32(fp32
);
9590 check_cp1_registers(ctx
, fs
);
9592 TCGv_i32 fp32
= tcg_temp_new_i32();
9593 TCGv_i64 fp64
= tcg_temp_new_i64();
9595 gen_load_fpr64(ctx
, fp64
, fs
);
9597 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
9599 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
9601 tcg_temp_free_i64(fp64
);
9602 gen_store_fpr32(ctx
, fp32
, fd
);
9603 tcg_temp_free_i32(fp32
);
9607 check_insn(ctx
, ISA_MIPS32R6
);
9608 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9611 check_insn(ctx
, ISA_MIPS32R6
);
9612 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9615 check_insn(ctx
, ISA_MIPS32R6
);
9616 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9619 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9620 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9623 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9625 TCGLabel
*l1
= gen_new_label();
9629 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9631 fp0
= tcg_temp_new_i64();
9632 gen_load_fpr64(ctx
, fp0
, fs
);
9633 gen_store_fpr64(ctx
, fp0
, fd
);
9634 tcg_temp_free_i64(fp0
);
9639 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9641 TCGLabel
*l1
= gen_new_label();
9645 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9646 fp0
= tcg_temp_new_i64();
9647 gen_load_fpr64(ctx
, fp0
, fs
);
9648 gen_store_fpr64(ctx
, fp0
, fd
);
9649 tcg_temp_free_i64(fp0
);
9655 check_cp1_registers(ctx
, fs
| fd
);
9657 TCGv_i64 fp0
= tcg_temp_new_i64();
9659 gen_load_fpr64(ctx
, fp0
, fs
);
9660 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9661 gen_store_fpr64(ctx
, fp0
, fd
);
9662 tcg_temp_free_i64(fp0
);
9666 check_cp1_registers(ctx
, fs
| fd
);
9668 TCGv_i64 fp0
= tcg_temp_new_i64();
9670 gen_load_fpr64(ctx
, fp0
, fs
);
9671 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9672 gen_store_fpr64(ctx
, fp0
, fd
);
9673 tcg_temp_free_i64(fp0
);
9677 check_insn(ctx
, ISA_MIPS32R6
);
9679 TCGv_i64 fp0
= tcg_temp_new_i64();
9680 TCGv_i64 fp1
= tcg_temp_new_i64();
9681 TCGv_i64 fp2
= tcg_temp_new_i64();
9682 gen_load_fpr64(ctx
, fp0
, fs
);
9683 gen_load_fpr64(ctx
, fp1
, ft
);
9684 gen_load_fpr64(ctx
, fp2
, fd
);
9685 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9686 gen_store_fpr64(ctx
, fp2
, fd
);
9687 tcg_temp_free_i64(fp2
);
9688 tcg_temp_free_i64(fp1
);
9689 tcg_temp_free_i64(fp0
);
9693 check_insn(ctx
, ISA_MIPS32R6
);
9695 TCGv_i64 fp0
= tcg_temp_new_i64();
9696 TCGv_i64 fp1
= tcg_temp_new_i64();
9697 TCGv_i64 fp2
= tcg_temp_new_i64();
9698 gen_load_fpr64(ctx
, fp0
, fs
);
9699 gen_load_fpr64(ctx
, fp1
, ft
);
9700 gen_load_fpr64(ctx
, fp2
, fd
);
9701 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9702 gen_store_fpr64(ctx
, fp2
, fd
);
9703 tcg_temp_free_i64(fp2
);
9704 tcg_temp_free_i64(fp1
);
9705 tcg_temp_free_i64(fp0
);
9709 check_insn(ctx
, ISA_MIPS32R6
);
9711 TCGv_i64 fp0
= tcg_temp_new_i64();
9712 gen_load_fpr64(ctx
, fp0
, fs
);
9713 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9714 gen_store_fpr64(ctx
, fp0
, fd
);
9715 tcg_temp_free_i64(fp0
);
9719 check_insn(ctx
, ISA_MIPS32R6
);
9721 TCGv_i64 fp0
= tcg_temp_new_i64();
9722 gen_load_fpr64(ctx
, fp0
, fs
);
9723 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
9724 gen_store_fpr64(ctx
, fp0
, fd
);
9725 tcg_temp_free_i64(fp0
);
9728 case OPC_MIN_D
: /* OPC_RECIP2_D */
9729 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9731 TCGv_i64 fp0
= tcg_temp_new_i64();
9732 TCGv_i64 fp1
= tcg_temp_new_i64();
9733 gen_load_fpr64(ctx
, fp0
, fs
);
9734 gen_load_fpr64(ctx
, fp1
, ft
);
9735 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9736 gen_store_fpr64(ctx
, fp1
, fd
);
9737 tcg_temp_free_i64(fp1
);
9738 tcg_temp_free_i64(fp0
);
9741 check_cp1_64bitmode(ctx
);
9743 TCGv_i64 fp0
= tcg_temp_new_i64();
9744 TCGv_i64 fp1
= tcg_temp_new_i64();
9746 gen_load_fpr64(ctx
, fp0
, fs
);
9747 gen_load_fpr64(ctx
, fp1
, ft
);
9748 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9749 tcg_temp_free_i64(fp1
);
9750 gen_store_fpr64(ctx
, fp0
, fd
);
9751 tcg_temp_free_i64(fp0
);
9755 case OPC_MINA_D
: /* OPC_RECIP1_D */
9756 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9758 TCGv_i64 fp0
= tcg_temp_new_i64();
9759 TCGv_i64 fp1
= tcg_temp_new_i64();
9760 gen_load_fpr64(ctx
, fp0
, fs
);
9761 gen_load_fpr64(ctx
, fp1
, ft
);
9762 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9763 gen_store_fpr64(ctx
, fp1
, fd
);
9764 tcg_temp_free_i64(fp1
);
9765 tcg_temp_free_i64(fp0
);
9768 check_cp1_64bitmode(ctx
);
9770 TCGv_i64 fp0
= tcg_temp_new_i64();
9772 gen_load_fpr64(ctx
, fp0
, fs
);
9773 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9774 gen_store_fpr64(ctx
, fp0
, fd
);
9775 tcg_temp_free_i64(fp0
);
9779 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9780 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9782 TCGv_i64 fp0
= tcg_temp_new_i64();
9783 TCGv_i64 fp1
= tcg_temp_new_i64();
9784 gen_load_fpr64(ctx
, fp0
, fs
);
9785 gen_load_fpr64(ctx
, fp1
, ft
);
9786 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9787 gen_store_fpr64(ctx
, fp1
, fd
);
9788 tcg_temp_free_i64(fp1
);
9789 tcg_temp_free_i64(fp0
);
9792 check_cp1_64bitmode(ctx
);
9794 TCGv_i64 fp0
= tcg_temp_new_i64();
9796 gen_load_fpr64(ctx
, fp0
, fs
);
9797 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9798 gen_store_fpr64(ctx
, fp0
, fd
);
9799 tcg_temp_free_i64(fp0
);
9803 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9804 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9806 TCGv_i64 fp0
= tcg_temp_new_i64();
9807 TCGv_i64 fp1
= tcg_temp_new_i64();
9808 gen_load_fpr64(ctx
, fp0
, fs
);
9809 gen_load_fpr64(ctx
, fp1
, ft
);
9810 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9811 gen_store_fpr64(ctx
, fp1
, fd
);
9812 tcg_temp_free_i64(fp1
);
9813 tcg_temp_free_i64(fp0
);
9816 check_cp1_64bitmode(ctx
);
9818 TCGv_i64 fp0
= tcg_temp_new_i64();
9819 TCGv_i64 fp1
= tcg_temp_new_i64();
9821 gen_load_fpr64(ctx
, fp0
, fs
);
9822 gen_load_fpr64(ctx
, fp1
, ft
);
9823 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9824 tcg_temp_free_i64(fp1
);
9825 gen_store_fpr64(ctx
, fp0
, fd
);
9826 tcg_temp_free_i64(fp0
);
9839 case OPC_CMP_NGLE_D
:
9846 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9847 if (ctx
->opcode
& (1 << 6)) {
9848 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9850 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9854 check_cp1_registers(ctx
, fs
);
9856 TCGv_i32 fp32
= tcg_temp_new_i32();
9857 TCGv_i64 fp64
= tcg_temp_new_i64();
9859 gen_load_fpr64(ctx
, fp64
, fs
);
9860 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9861 tcg_temp_free_i64(fp64
);
9862 gen_store_fpr32(ctx
, fp32
, fd
);
9863 tcg_temp_free_i32(fp32
);
9867 check_cp1_registers(ctx
, fs
);
9869 TCGv_i32 fp32
= tcg_temp_new_i32();
9870 TCGv_i64 fp64
= tcg_temp_new_i64();
9872 gen_load_fpr64(ctx
, fp64
, fs
);
9874 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
9876 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
9878 tcg_temp_free_i64(fp64
);
9879 gen_store_fpr32(ctx
, fp32
, fd
);
9880 tcg_temp_free_i32(fp32
);
9884 check_cp1_64bitmode(ctx
);
9886 TCGv_i64 fp0
= tcg_temp_new_i64();
9888 gen_load_fpr64(ctx
, fp0
, fs
);
9890 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
9892 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
9894 gen_store_fpr64(ctx
, fp0
, fd
);
9895 tcg_temp_free_i64(fp0
);
9900 TCGv_i32 fp0
= tcg_temp_new_i32();
9902 gen_load_fpr32(ctx
, fp0
, fs
);
9903 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9904 gen_store_fpr32(ctx
, fp0
, fd
);
9905 tcg_temp_free_i32(fp0
);
9909 check_cp1_registers(ctx
, fd
);
9911 TCGv_i32 fp32
= tcg_temp_new_i32();
9912 TCGv_i64 fp64
= tcg_temp_new_i64();
9914 gen_load_fpr32(ctx
, fp32
, fs
);
9915 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9916 tcg_temp_free_i32(fp32
);
9917 gen_store_fpr64(ctx
, fp64
, fd
);
9918 tcg_temp_free_i64(fp64
);
9922 check_cp1_64bitmode(ctx
);
9924 TCGv_i32 fp32
= tcg_temp_new_i32();
9925 TCGv_i64 fp64
= tcg_temp_new_i64();
9927 gen_load_fpr64(ctx
, fp64
, fs
);
9928 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9929 tcg_temp_free_i64(fp64
);
9930 gen_store_fpr32(ctx
, fp32
, fd
);
9931 tcg_temp_free_i32(fp32
);
9935 check_cp1_64bitmode(ctx
);
9937 TCGv_i64 fp0
= tcg_temp_new_i64();
9939 gen_load_fpr64(ctx
, fp0
, fs
);
9940 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9941 gen_store_fpr64(ctx
, fp0
, fd
);
9942 tcg_temp_free_i64(fp0
);
9948 TCGv_i64 fp0
= tcg_temp_new_i64();
9950 gen_load_fpr64(ctx
, fp0
, fs
);
9951 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9952 gen_store_fpr64(ctx
, fp0
, fd
);
9953 tcg_temp_free_i64(fp0
);
9959 TCGv_i64 fp0
= tcg_temp_new_i64();
9960 TCGv_i64 fp1
= tcg_temp_new_i64();
9962 gen_load_fpr64(ctx
, fp0
, fs
);
9963 gen_load_fpr64(ctx
, fp1
, ft
);
9964 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9965 tcg_temp_free_i64(fp1
);
9966 gen_store_fpr64(ctx
, fp0
, fd
);
9967 tcg_temp_free_i64(fp0
);
9973 TCGv_i64 fp0
= tcg_temp_new_i64();
9974 TCGv_i64 fp1
= tcg_temp_new_i64();
9976 gen_load_fpr64(ctx
, fp0
, fs
);
9977 gen_load_fpr64(ctx
, fp1
, ft
);
9978 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9979 tcg_temp_free_i64(fp1
);
9980 gen_store_fpr64(ctx
, fp0
, fd
);
9981 tcg_temp_free_i64(fp0
);
9987 TCGv_i64 fp0
= tcg_temp_new_i64();
9988 TCGv_i64 fp1
= tcg_temp_new_i64();
9990 gen_load_fpr64(ctx
, fp0
, fs
);
9991 gen_load_fpr64(ctx
, fp1
, ft
);
9992 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9993 tcg_temp_free_i64(fp1
);
9994 gen_store_fpr64(ctx
, fp0
, fd
);
9995 tcg_temp_free_i64(fp0
);
10001 TCGv_i64 fp0
= tcg_temp_new_i64();
10003 gen_load_fpr64(ctx
, fp0
, fs
);
10004 gen_helper_float_abs_ps(fp0
, fp0
);
10005 gen_store_fpr64(ctx
, fp0
, fd
);
10006 tcg_temp_free_i64(fp0
);
10012 TCGv_i64 fp0
= tcg_temp_new_i64();
10014 gen_load_fpr64(ctx
, fp0
, fs
);
10015 gen_store_fpr64(ctx
, fp0
, fd
);
10016 tcg_temp_free_i64(fp0
);
10022 TCGv_i64 fp0
= tcg_temp_new_i64();
10024 gen_load_fpr64(ctx
, fp0
, fs
);
10025 gen_helper_float_chs_ps(fp0
, fp0
);
10026 gen_store_fpr64(ctx
, fp0
, fd
);
10027 tcg_temp_free_i64(fp0
);
10032 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10037 TCGLabel
*l1
= gen_new_label();
10041 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10042 fp0
= tcg_temp_new_i64();
10043 gen_load_fpr64(ctx
, fp0
, fs
);
10044 gen_store_fpr64(ctx
, fp0
, fd
);
10045 tcg_temp_free_i64(fp0
);
10052 TCGLabel
*l1
= gen_new_label();
10056 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10057 fp0
= tcg_temp_new_i64();
10058 gen_load_fpr64(ctx
, fp0
, fs
);
10059 gen_store_fpr64(ctx
, fp0
, fd
);
10060 tcg_temp_free_i64(fp0
);
10068 TCGv_i64 fp0
= tcg_temp_new_i64();
10069 TCGv_i64 fp1
= tcg_temp_new_i64();
10071 gen_load_fpr64(ctx
, fp0
, ft
);
10072 gen_load_fpr64(ctx
, fp1
, fs
);
10073 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
10074 tcg_temp_free_i64(fp1
);
10075 gen_store_fpr64(ctx
, fp0
, fd
);
10076 tcg_temp_free_i64(fp0
);
10082 TCGv_i64 fp0
= tcg_temp_new_i64();
10083 TCGv_i64 fp1
= tcg_temp_new_i64();
10085 gen_load_fpr64(ctx
, fp0
, ft
);
10086 gen_load_fpr64(ctx
, fp1
, fs
);
10087 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
10088 tcg_temp_free_i64(fp1
);
10089 gen_store_fpr64(ctx
, fp0
, fd
);
10090 tcg_temp_free_i64(fp0
);
10093 case OPC_RECIP2_PS
:
10096 TCGv_i64 fp0
= tcg_temp_new_i64();
10097 TCGv_i64 fp1
= tcg_temp_new_i64();
10099 gen_load_fpr64(ctx
, fp0
, fs
);
10100 gen_load_fpr64(ctx
, fp1
, ft
);
10101 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
10102 tcg_temp_free_i64(fp1
);
10103 gen_store_fpr64(ctx
, fp0
, fd
);
10104 tcg_temp_free_i64(fp0
);
10107 case OPC_RECIP1_PS
:
10110 TCGv_i64 fp0
= tcg_temp_new_i64();
10112 gen_load_fpr64(ctx
, fp0
, fs
);
10113 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10114 gen_store_fpr64(ctx
, fp0
, fd
);
10115 tcg_temp_free_i64(fp0
);
10118 case OPC_RSQRT1_PS
:
10121 TCGv_i64 fp0
= tcg_temp_new_i64();
10123 gen_load_fpr64(ctx
, fp0
, fs
);
10124 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10125 gen_store_fpr64(ctx
, fp0
, fd
);
10126 tcg_temp_free_i64(fp0
);
10129 case OPC_RSQRT2_PS
:
10132 TCGv_i64 fp0
= tcg_temp_new_i64();
10133 TCGv_i64 fp1
= tcg_temp_new_i64();
10135 gen_load_fpr64(ctx
, fp0
, fs
);
10136 gen_load_fpr64(ctx
, fp1
, ft
);
10137 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10138 tcg_temp_free_i64(fp1
);
10139 gen_store_fpr64(ctx
, fp0
, fd
);
10140 tcg_temp_free_i64(fp0
);
10144 check_cp1_64bitmode(ctx
);
10146 TCGv_i32 fp0
= tcg_temp_new_i32();
10148 gen_load_fpr32h(ctx
, fp0
, fs
);
10149 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10150 gen_store_fpr32(ctx
, fp0
, fd
);
10151 tcg_temp_free_i32(fp0
);
10154 case OPC_CVT_PW_PS
:
10157 TCGv_i64 fp0
= tcg_temp_new_i64();
10159 gen_load_fpr64(ctx
, fp0
, fs
);
10160 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10161 gen_store_fpr64(ctx
, fp0
, fd
);
10162 tcg_temp_free_i64(fp0
);
10166 check_cp1_64bitmode(ctx
);
10168 TCGv_i32 fp0
= tcg_temp_new_i32();
10170 gen_load_fpr32(ctx
, fp0
, fs
);
10171 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10172 gen_store_fpr32(ctx
, fp0
, fd
);
10173 tcg_temp_free_i32(fp0
);
10179 TCGv_i32 fp0
= tcg_temp_new_i32();
10180 TCGv_i32 fp1
= tcg_temp_new_i32();
10182 gen_load_fpr32(ctx
, fp0
, fs
);
10183 gen_load_fpr32(ctx
, fp1
, ft
);
10184 gen_store_fpr32h(ctx
, fp0
, fd
);
10185 gen_store_fpr32(ctx
, fp1
, fd
);
10186 tcg_temp_free_i32(fp0
);
10187 tcg_temp_free_i32(fp1
);
10193 TCGv_i32 fp0
= tcg_temp_new_i32();
10194 TCGv_i32 fp1
= tcg_temp_new_i32();
10196 gen_load_fpr32(ctx
, fp0
, fs
);
10197 gen_load_fpr32h(ctx
, fp1
, ft
);
10198 gen_store_fpr32(ctx
, fp1
, fd
);
10199 gen_store_fpr32h(ctx
, fp0
, fd
);
10200 tcg_temp_free_i32(fp0
);
10201 tcg_temp_free_i32(fp1
);
10207 TCGv_i32 fp0
= tcg_temp_new_i32();
10208 TCGv_i32 fp1
= tcg_temp_new_i32();
10210 gen_load_fpr32h(ctx
, fp0
, fs
);
10211 gen_load_fpr32(ctx
, fp1
, ft
);
10212 gen_store_fpr32(ctx
, fp1
, fd
);
10213 gen_store_fpr32h(ctx
, fp0
, fd
);
10214 tcg_temp_free_i32(fp0
);
10215 tcg_temp_free_i32(fp1
);
10221 TCGv_i32 fp0
= tcg_temp_new_i32();
10222 TCGv_i32 fp1
= tcg_temp_new_i32();
10224 gen_load_fpr32h(ctx
, fp0
, fs
);
10225 gen_load_fpr32h(ctx
, fp1
, ft
);
10226 gen_store_fpr32(ctx
, fp1
, fd
);
10227 gen_store_fpr32h(ctx
, fp0
, fd
);
10228 tcg_temp_free_i32(fp0
);
10229 tcg_temp_free_i32(fp1
);
10233 case OPC_CMP_UN_PS
:
10234 case OPC_CMP_EQ_PS
:
10235 case OPC_CMP_UEQ_PS
:
10236 case OPC_CMP_OLT_PS
:
10237 case OPC_CMP_ULT_PS
:
10238 case OPC_CMP_OLE_PS
:
10239 case OPC_CMP_ULE_PS
:
10240 case OPC_CMP_SF_PS
:
10241 case OPC_CMP_NGLE_PS
:
10242 case OPC_CMP_SEQ_PS
:
10243 case OPC_CMP_NGL_PS
:
10244 case OPC_CMP_LT_PS
:
10245 case OPC_CMP_NGE_PS
:
10246 case OPC_CMP_LE_PS
:
10247 case OPC_CMP_NGT_PS
:
10248 if (ctx
->opcode
& (1 << 6)) {
10249 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10251 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10255 MIPS_INVAL("farith");
10256 generate_exception_end(ctx
, EXCP_RI
);
10261 /* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
    TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
            TCGv_i32 fp0 = tcg_temp_new_i32();
            tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fd);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
            tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fs);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
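
/*
 * Illustrative sketch: a hypothetical helper (not part of the original
 * source) describing the addressing used by the indexed FP loads/stores
 * above.  The effective address is base + index (a $zero operand simply
 * contributes 0), and the unaligned forms (presumably LUXC1/SUXC1) then
 * clear the low three address bits before the 64-bit access.
 */
static inline target_ulong flt3_effective_addr_sketch(target_ulong base,
                                                      target_ulong index,
                                                      int align8)
{
    target_ulong addr = base + index;
    return align8 ? (addr & ~(target_ulong)0x7) : addr;
}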
10342 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10343 int fd
, int fr
, int fs
, int ft
)
10349 TCGv t0
= tcg_temp_local_new();
10350 TCGv_i32 fp
= tcg_temp_new_i32();
10351 TCGv_i32 fph
= tcg_temp_new_i32();
10352 TCGLabel
*l1
= gen_new_label();
10353 TCGLabel
*l2
= gen_new_label();
10355 gen_load_gpr(t0
, fr
);
10356 tcg_gen_andi_tl(t0
, t0
, 0x7);
10358 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10359 gen_load_fpr32(ctx
, fp
, fs
);
10360 gen_load_fpr32h(ctx
, fph
, fs
);
10361 gen_store_fpr32(ctx
, fp
, fd
);
10362 gen_store_fpr32h(ctx
, fph
, fd
);
10365 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10367 #ifdef TARGET_WORDS_BIGENDIAN
10368 gen_load_fpr32(ctx
, fp
, fs
);
10369 gen_load_fpr32h(ctx
, fph
, ft
);
10370 gen_store_fpr32h(ctx
, fp
, fd
);
10371 gen_store_fpr32(ctx
, fph
, fd
);
10373 gen_load_fpr32h(ctx
, fph
, fs
);
10374 gen_load_fpr32(ctx
, fp
, ft
);
10375 gen_store_fpr32(ctx
, fph
, fd
);
10376 gen_store_fpr32h(ctx
, fp
, fd
);
10379 tcg_temp_free_i32(fp
);
10380 tcg_temp_free_i32(fph
);
10386 TCGv_i32 fp0
= tcg_temp_new_i32();
10387 TCGv_i32 fp1
= tcg_temp_new_i32();
10388 TCGv_i32 fp2
= tcg_temp_new_i32();
10390 gen_load_fpr32(ctx
, fp0
, fs
);
10391 gen_load_fpr32(ctx
, fp1
, ft
);
10392 gen_load_fpr32(ctx
, fp2
, fr
);
10393 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10394 tcg_temp_free_i32(fp0
);
10395 tcg_temp_free_i32(fp1
);
10396 gen_store_fpr32(ctx
, fp2
, fd
);
10397 tcg_temp_free_i32(fp2
);
10402 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10404 TCGv_i64 fp0
= tcg_temp_new_i64();
10405 TCGv_i64 fp1
= tcg_temp_new_i64();
10406 TCGv_i64 fp2
= tcg_temp_new_i64();
10408 gen_load_fpr64(ctx
, fp0
, fs
);
10409 gen_load_fpr64(ctx
, fp1
, ft
);
10410 gen_load_fpr64(ctx
, fp2
, fr
);
10411 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10412 tcg_temp_free_i64(fp0
);
10413 tcg_temp_free_i64(fp1
);
10414 gen_store_fpr64(ctx
, fp2
, fd
);
10415 tcg_temp_free_i64(fp2
);
10421 TCGv_i64 fp0
= tcg_temp_new_i64();
10422 TCGv_i64 fp1
= tcg_temp_new_i64();
10423 TCGv_i64 fp2
= tcg_temp_new_i64();
10425 gen_load_fpr64(ctx
, fp0
, fs
);
10426 gen_load_fpr64(ctx
, fp1
, ft
);
10427 gen_load_fpr64(ctx
, fp2
, fr
);
10428 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10429 tcg_temp_free_i64(fp0
);
10430 tcg_temp_free_i64(fp1
);
10431 gen_store_fpr64(ctx
, fp2
, fd
);
10432 tcg_temp_free_i64(fp2
);
10438 TCGv_i32 fp0
= tcg_temp_new_i32();
10439 TCGv_i32 fp1
= tcg_temp_new_i32();
10440 TCGv_i32 fp2
= tcg_temp_new_i32();
10442 gen_load_fpr32(ctx
, fp0
, fs
);
10443 gen_load_fpr32(ctx
, fp1
, ft
);
10444 gen_load_fpr32(ctx
, fp2
, fr
);
10445 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10446 tcg_temp_free_i32(fp0
);
10447 tcg_temp_free_i32(fp1
);
10448 gen_store_fpr32(ctx
, fp2
, fd
);
10449 tcg_temp_free_i32(fp2
);
10454 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10456 TCGv_i64 fp0
= tcg_temp_new_i64();
10457 TCGv_i64 fp1
= tcg_temp_new_i64();
10458 TCGv_i64 fp2
= tcg_temp_new_i64();
10460 gen_load_fpr64(ctx
, fp0
, fs
);
10461 gen_load_fpr64(ctx
, fp1
, ft
);
10462 gen_load_fpr64(ctx
, fp2
, fr
);
10463 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10464 tcg_temp_free_i64(fp0
);
10465 tcg_temp_free_i64(fp1
);
10466 gen_store_fpr64(ctx
, fp2
, fd
);
10467 tcg_temp_free_i64(fp2
);
10473 TCGv_i64 fp0
= tcg_temp_new_i64();
10474 TCGv_i64 fp1
= tcg_temp_new_i64();
10475 TCGv_i64 fp2
= tcg_temp_new_i64();
10477 gen_load_fpr64(ctx
, fp0
, fs
);
10478 gen_load_fpr64(ctx
, fp1
, ft
);
10479 gen_load_fpr64(ctx
, fp2
, fr
);
10480 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10481 tcg_temp_free_i64(fp0
);
10482 tcg_temp_free_i64(fp1
);
10483 gen_store_fpr64(ctx
, fp2
, fd
);
10484 tcg_temp_free_i64(fp2
);
10490 TCGv_i32 fp0
= tcg_temp_new_i32();
10491 TCGv_i32 fp1
= tcg_temp_new_i32();
10492 TCGv_i32 fp2
= tcg_temp_new_i32();
10494 gen_load_fpr32(ctx
, fp0
, fs
);
10495 gen_load_fpr32(ctx
, fp1
, ft
);
10496 gen_load_fpr32(ctx
, fp2
, fr
);
10497 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10498 tcg_temp_free_i32(fp0
);
10499 tcg_temp_free_i32(fp1
);
10500 gen_store_fpr32(ctx
, fp2
, fd
);
10501 tcg_temp_free_i32(fp2
);
10506 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10508 TCGv_i64 fp0
= tcg_temp_new_i64();
10509 TCGv_i64 fp1
= tcg_temp_new_i64();
10510 TCGv_i64 fp2
= tcg_temp_new_i64();
10512 gen_load_fpr64(ctx
, fp0
, fs
);
10513 gen_load_fpr64(ctx
, fp1
, ft
);
10514 gen_load_fpr64(ctx
, fp2
, fr
);
10515 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10516 tcg_temp_free_i64(fp0
);
10517 tcg_temp_free_i64(fp1
);
10518 gen_store_fpr64(ctx
, fp2
, fd
);
10519 tcg_temp_free_i64(fp2
);
10525 TCGv_i64 fp0
= tcg_temp_new_i64();
10526 TCGv_i64 fp1
= tcg_temp_new_i64();
10527 TCGv_i64 fp2
= tcg_temp_new_i64();
10529 gen_load_fpr64(ctx
, fp0
, fs
);
10530 gen_load_fpr64(ctx
, fp1
, ft
);
10531 gen_load_fpr64(ctx
, fp2
, fr
);
10532 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10533 tcg_temp_free_i64(fp0
);
10534 tcg_temp_free_i64(fp1
);
10535 gen_store_fpr64(ctx
, fp2
, fd
);
10536 tcg_temp_free_i64(fp2
);
10542 TCGv_i32 fp0
= tcg_temp_new_i32();
10543 TCGv_i32 fp1
= tcg_temp_new_i32();
10544 TCGv_i32 fp2
= tcg_temp_new_i32();
10546 gen_load_fpr32(ctx
, fp0
, fs
);
10547 gen_load_fpr32(ctx
, fp1
, ft
);
10548 gen_load_fpr32(ctx
, fp2
, fr
);
10549 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10550 tcg_temp_free_i32(fp0
);
10551 tcg_temp_free_i32(fp1
);
10552 gen_store_fpr32(ctx
, fp2
, fd
);
10553 tcg_temp_free_i32(fp2
);
10558 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10560 TCGv_i64 fp0
= tcg_temp_new_i64();
10561 TCGv_i64 fp1
= tcg_temp_new_i64();
10562 TCGv_i64 fp2
= tcg_temp_new_i64();
10564 gen_load_fpr64(ctx
, fp0
, fs
);
10565 gen_load_fpr64(ctx
, fp1
, ft
);
10566 gen_load_fpr64(ctx
, fp2
, fr
);
10567 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10568 tcg_temp_free_i64(fp0
);
10569 tcg_temp_free_i64(fp1
);
10570 gen_store_fpr64(ctx
, fp2
, fd
);
10571 tcg_temp_free_i64(fp2
);
10577 TCGv_i64 fp0
= tcg_temp_new_i64();
10578 TCGv_i64 fp1
= tcg_temp_new_i64();
10579 TCGv_i64 fp2
= tcg_temp_new_i64();
10581 gen_load_fpr64(ctx
, fp0
, fs
);
10582 gen_load_fpr64(ctx
, fp1
, ft
);
10583 gen_load_fpr64(ctx
, fp2
, fr
);
10584 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10585 tcg_temp_free_i64(fp0
);
10586 tcg_temp_free_i64(fp1
);
10587 gen_store_fpr64(ctx
, fp2
, fd
);
10588 tcg_temp_free_i64(fp2
);
    default:
        MIPS_INVAL("flt3_arith");
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
}
10598 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
10602 #if !defined(CONFIG_USER_ONLY)
10603 /* The Linux kernel will emulate rdhwr if it's not supported natively.
10604 Therefore only check the ISA in system mode. */
10605 check_insn(ctx
, ISA_MIPS32R2
);
10607 t0
= tcg_temp_new();
10611 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
10612 gen_store_gpr(t0
, rt
);
10615 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
10616 gen_store_gpr(t0
, rt
);
10619 gen_helper_rdhwr_cc(t0
, cpu_env
);
10620 gen_store_gpr(t0
, rt
);
10623 gen_helper_rdhwr_ccres(t0
, cpu_env
);
10624 gen_store_gpr(t0
, rt
);
10627 check_insn(ctx
, ISA_MIPS32R6
);
10629 /* Performance counter registers are not implemented other than
10630 * control register 0.
10632 generate_exception(ctx
, EXCP_RI
);
10634 gen_helper_rdhwr_performance(t0
, cpu_env
);
10635 gen_store_gpr(t0
, rt
);
10638 check_insn(ctx
, ISA_MIPS32R6
);
10639 gen_helper_rdhwr_xnp(t0
, cpu_env
);
10640 gen_store_gpr(t0
, rt
);
10643 #if defined(CONFIG_USER_ONLY)
10644 tcg_gen_ld_tl(t0
, cpu_env
,
10645 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10646 gen_store_gpr(t0
, rt
);
10649 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
10650 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
10651 tcg_gen_ld_tl(t0
, cpu_env
,
10652 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10653 gen_store_gpr(t0
, rt
);
10655 generate_exception_end(ctx
, EXCP_RI
);
10659 default: /* Invalid */
10660 MIPS_INVAL("rdhwr");
10661 generate_exception_end(ctx
, EXCP_RI
);
static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
    } else {
        /* It is not safe to save ctx->hflags here, as hflags may be changed
           at execution time by the instruction in the delay / forbidden slot. */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}

static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branch completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io. */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->pc + insn_bytes);
            break;
        case MIPS_HFLAG_B:
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* branch likely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            {
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_temp_free(t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_exit_tb(0);
            break;
        default:
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
            abort();
        }
    }
}

/* Compact Branches */
10745 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10746 int rs
, int rt
, int32_t offset
)
10748 int bcond_compute
= 0;
10749 TCGv t0
= tcg_temp_new();
10750 TCGv t1
= tcg_temp_new();
10751 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10753 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10754 #ifdef MIPS_DEBUG_DISAS
10755 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10758 generate_exception_end(ctx
, EXCP_RI
);
10762 /* Load needed operands and calculate btarget */
10764 /* compact branch */
10765 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10766 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10767 gen_load_gpr(t0
, rs
);
10768 gen_load_gpr(t1
, rt
);
10770 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10771 if (rs
<= rt
&& rs
== 0) {
10772 /* OPC_BEQZALC, OPC_BNEZALC */
10773 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10776 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10777 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10778 gen_load_gpr(t0
, rs
);
10779 gen_load_gpr(t1
, rt
);
10781 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10783 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10784 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10785 if (rs
== 0 || rs
== rt
) {
10786 /* OPC_BLEZALC, OPC_BGEZALC */
10787 /* OPC_BGTZALC, OPC_BLTZALC */
10788 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10790 gen_load_gpr(t0
, rs
);
10791 gen_load_gpr(t1
, rt
);
10793 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10797 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10802 /* OPC_BEQZC, OPC_BNEZC */
10803 gen_load_gpr(t0
, rs
);
10805 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10807 /* OPC_JIC, OPC_JIALC */
10808 TCGv tbase
= tcg_temp_new();
10809 TCGv toffset
= tcg_temp_new();
10811 gen_load_gpr(tbase
, rt
);
10812 tcg_gen_movi_tl(toffset
, offset
);
10813 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10814 tcg_temp_free(tbase
);
10815 tcg_temp_free(toffset
);
10819 MIPS_INVAL("Compact branch/jump");
10820 generate_exception_end(ctx
, EXCP_RI
);
10824 if (bcond_compute
== 0) {
10825 /* Uncoditional compact branch */
10828 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10831 ctx
->hflags
|= MIPS_HFLAG_BR
;
10834 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10837 ctx
->hflags
|= MIPS_HFLAG_B
;
10840 MIPS_INVAL("Compact branch/jump");
10841 generate_exception_end(ctx
, EXCP_RI
);
10845 /* Generating branch here as compact branches don't have delay slot */
10846 gen_branch(ctx
, 4);
10848 /* Conditional compact branch */
10849 TCGLabel
*fs
= gen_new_label();
10850 save_cpu_state(ctx
, 0);
10853 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10854 if (rs
== 0 && rt
!= 0) {
10856 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10857 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10859 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10862 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10865 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10866 if (rs
== 0 && rt
!= 0) {
10868 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10869 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10871 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10874 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10877 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10878 if (rs
== 0 && rt
!= 0) {
10880 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10881 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10883 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10886 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10889 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10890 if (rs
== 0 && rt
!= 0) {
10892 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10893 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10895 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10898 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
10901 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10902 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10904 /* OPC_BOVC, OPC_BNVC */
10905 TCGv t2
= tcg_temp_new();
10906 TCGv t3
= tcg_temp_new();
10907 TCGv t4
= tcg_temp_new();
10908 TCGv input_overflow
= tcg_temp_new();
10910 gen_load_gpr(t0
, rs
);
10911 gen_load_gpr(t1
, rt
);
10912 tcg_gen_ext32s_tl(t2
, t0
);
10913 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
10914 tcg_gen_ext32s_tl(t3
, t1
);
10915 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
10916 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
10918 tcg_gen_add_tl(t4
, t2
, t3
);
10919 tcg_gen_ext32s_tl(t4
, t4
);
10920 tcg_gen_xor_tl(t2
, t2
, t3
);
10921 tcg_gen_xor_tl(t3
, t4
, t3
);
10922 tcg_gen_andc_tl(t2
, t3
, t2
);
10923 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
10924 tcg_gen_or_tl(t4
, t4
, input_overflow
);
10925 if (opc
== OPC_BOVC
) {
10927 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
10930 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
10932 tcg_temp_free(input_overflow
);
            } else if (rs < rt && rs == 0) {
                /* OPC_BEQZALC, OPC_BNEZALC */
                if (opc == OPC_BEQZALC) {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t1, 0, fs);
                } else {
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t1, 0, fs);
                }
            } else {
                /* OPC_BEQC, OPC_BNEC */
                if (opc == OPC_BEQC) {
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ), t0, t1, fs);
                } else {
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE), t0, t1, fs);
                }
            }
            break;
        case OPC_BEQZC:
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t0, 0, fs);
            break;
        case OPC_BNEZC:
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t0, 0, fs);
            break;
        default:
            MIPS_INVAL("Compact conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }

        /* Generating the branch here as compact branches don't have a delay slot */
        gen_goto_tb(ctx, 1, ctx->btarget);
        gen_set_label(fs);

        ctx->hflags |= MIPS_HFLAG_FBNSLOT;
    }

out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
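
/*
 * Sketch of the flow above (added commentary, not from the original
 * source): gen_compute_compact_branch() first loads the operands and
 * computes ctx->btarget, then either emits the branch immediately via
 * gen_branch(ctx, 4) for the unconditional forms, or, for the conditional
 * forms, emits an inverted brcond that skips to the local label when the
 * condition fails, a gen_goto_tb() to the target when it holds, and
 * finally sets MIPS_HFLAG_FBNSLOT so the following instruction is treated
 * as a forbidden slot rather than a delay slot.
 */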
10980 /* ISA extensions (ASEs) */
10981 /* MIPS16 extension to MIPS32 */
10983 /* MIPS16 major opcodes */
10985 M16_OPC_ADDIUSP
= 0x00,
10986 M16_OPC_ADDIUPC
= 0x01,
10988 M16_OPC_JAL
= 0x03,
10989 M16_OPC_BEQZ
= 0x04,
10990 M16_OPC_BNEQZ
= 0x05,
10991 M16_OPC_SHIFT
= 0x06,
10993 M16_OPC_RRIA
= 0x08,
10994 M16_OPC_ADDIU8
= 0x09,
10995 M16_OPC_SLTI
= 0x0a,
10996 M16_OPC_SLTIU
= 0x0b,
10999 M16_OPC_CMPI
= 0x0e,
11003 M16_OPC_LWSP
= 0x12,
11005 M16_OPC_LBU
= 0x14,
11006 M16_OPC_LHU
= 0x15,
11007 M16_OPC_LWPC
= 0x16,
11008 M16_OPC_LWU
= 0x17,
11011 M16_OPC_SWSP
= 0x1a,
11013 M16_OPC_RRR
= 0x1c,
11015 M16_OPC_EXTEND
= 0x1e,
11019 /* I8 funct field */
11038 /* RR funct field */
11072 /* I64 funct field */
11080 I64_DADDIUPC
= 0x6,
11084 /* RR ry field for CNVT */
11086 RR_RY_CNVT_ZEB
= 0x0,
11087 RR_RY_CNVT_ZEH
= 0x1,
11088 RR_RY_CNVT_ZEW
= 0x2,
11089 RR_RY_CNVT_SEB
= 0x4,
11090 RR_RY_CNVT_SEH
= 0x5,
11091 RR_RY_CNVT_SEW
= 0x6,
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
11101 static void gen_mips16_save (DisasContext
*ctx
,
11102 int xsregs
, int aregs
,
11103 int do_ra
, int do_s0
, int do_s1
,
11106 TCGv t0
= tcg_temp_new();
11107 TCGv t1
= tcg_temp_new();
11108 TCGv t2
= tcg_temp_new();
11138 generate_exception_end(ctx
, EXCP_RI
);
11144 gen_base_offset_addr(ctx
, t0
, 29, 12);
11145 gen_load_gpr(t1
, 7);
11146 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11149 gen_base_offset_addr(ctx
, t0
, 29, 8);
11150 gen_load_gpr(t1
, 6);
11151 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11154 gen_base_offset_addr(ctx
, t0
, 29, 4);
11155 gen_load_gpr(t1
, 5);
11156 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11159 gen_base_offset_addr(ctx
, t0
, 29, 0);
11160 gen_load_gpr(t1
, 4);
11161 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
    gen_load_gpr(t0, 29);

#define DECR_AND_STORE(reg) do {                                 \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        gen_load_gpr(t1, reg);                                   \
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);       \
    } while (0)

    /* ra and the static registers are pushed from $31 down to $16, each
       store guarded in the original control flow by the corresponding
       save-mask field (do_ra, xsregs, do_s1, do_s0). */
    DECR_AND_STORE(31);
    DECR_AND_STORE(30);
    DECR_AND_STORE(23);
    DECR_AND_STORE(22);
    DECR_AND_STORE(21);
    DECR_AND_STORE(20);
    DECR_AND_STORE(19);
    DECR_AND_STORE(18);
    DECR_AND_STORE(17);
    DECR_AND_STORE(16);

    /* The static argument registers selected by aregs are stored next; an
       aregs encoding with no defined meaning raises a Reserved Instruction
       exception instead. */
    generate_exception_end(ctx, EXCP_RI);
#undef DECR_AND_STORE

    tcg_gen_movi_tl(t2, -framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
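
/*
 * Worked example (added commentary, based on the macro above): each
 * DECR_AND_STORE(reg) pre-decrements the running address in t0 by 4 and
 * stores GPR "reg" there, so DECR_AND_STORE(31) expands to roughly
 *
 *     tcg_gen_movi_tl(t2, -4);
 *     gen_op_addr_add(ctx, t0, t0, t2);
 *     gen_load_gpr(t1, 31);
 *     tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
 *
 * After the selected registers have been pushed, SP ($29) itself is
 * lowered by the full frame size in the two statements above.
 */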
11259 static void gen_mips16_restore (DisasContext
*ctx
,
11260 int xsregs
, int aregs
,
11261 int do_ra
, int do_s0
, int do_s1
,
11265 TCGv t0
= tcg_temp_new();
11266 TCGv t1
= tcg_temp_new();
11267 TCGv t2
= tcg_temp_new();
11269 tcg_gen_movi_tl(t2
, framesize
);
11270 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11272 #define DECR_AND_LOAD(reg) do { \
11273 tcg_gen_movi_tl(t2, -4); \
11274 gen_op_addr_add(ctx, t0, t0, t2); \
11275 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11276 gen_store_gpr(t1, reg); \
11340 generate_exception_end(ctx
, EXCP_RI
);
11356 #undef DECR_AND_LOAD
11358 tcg_gen_movi_tl(t2
, framesize
);
11359 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
{
    TCGv t0;

    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
    if (!is_64_bit) {
        tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
    }

    tcg_temp_free(t0);
}

static void gen_cache_operation(DisasContext *ctx, uint32_t op, int base,
                                int16_t offset)
{
    TCGv_i32 t0 = tcg_const_i32(op);
    TCGv t1 = tcg_temp_new();
    gen_base_offset_addr(ctx, t1, base, offset);
    gen_helper_cache(cpu_env, t1, t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t0);
}
11395 #if defined(TARGET_MIPS64)
11396 static void decode_i64_mips16 (DisasContext
*ctx
,
11397 int ry
, int funct
, int16_t offset
,
11402 check_insn(ctx
, ISA_MIPS3
);
11403 check_mips_64(ctx
);
11404 offset
= extended
? offset
: offset
<< 3;
11405 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11408 check_insn(ctx
, ISA_MIPS3
);
11409 check_mips_64(ctx
);
11410 offset
= extended
? offset
: offset
<< 3;
11411 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11414 check_insn(ctx
, ISA_MIPS3
);
11415 check_mips_64(ctx
);
11416 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11417 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11420 check_insn(ctx
, ISA_MIPS3
);
11421 check_mips_64(ctx
);
11422 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11423 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11426 check_insn(ctx
, ISA_MIPS3
);
11427 check_mips_64(ctx
);
11428 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11429 generate_exception_end(ctx
, EXCP_RI
);
11431 offset
= extended
? offset
: offset
<< 3;
11432 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11436 check_insn(ctx
, ISA_MIPS3
);
11437 check_mips_64(ctx
);
11438 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11439 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11442 check_insn(ctx
, ISA_MIPS3
);
11443 check_mips_64(ctx
);
11444 offset
= extended
? offset
: offset
<< 2;
11445 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11448 check_insn(ctx
, ISA_MIPS3
);
11449 check_mips_64(ctx
);
11450 offset
= extended
? offset
: offset
<< 2;
11451 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11457 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11459 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11460 int op
, rx
, ry
, funct
, sa
;
11461 int16_t imm
, offset
;
11463 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11464 op
= (ctx
->opcode
>> 11) & 0x1f;
11465 sa
= (ctx
->opcode
>> 22) & 0x1f;
11466 funct
= (ctx
->opcode
>> 8) & 0x7;
11467 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11468 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11469 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11470 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11471 | (ctx
->opcode
& 0x1f));
11473 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11476 case M16_OPC_ADDIUSP
:
11477 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11479 case M16_OPC_ADDIUPC
:
11480 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11483 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11484 /* No delay slot, so just process as a normal instruction */
11487 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11488 /* No delay slot, so just process as a normal instruction */
11490 case M16_OPC_BNEQZ
:
11491 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11492 /* No delay slot, so just process as a normal instruction */
11494 case M16_OPC_SHIFT
:
11495 switch (ctx
->opcode
& 0x3) {
11497 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11500 #if defined(TARGET_MIPS64)
11501 check_mips_64(ctx
);
11502 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11504 generate_exception_end(ctx
, EXCP_RI
);
11508 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11511 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11515 #if defined(TARGET_MIPS64)
11517 check_insn(ctx
, ISA_MIPS3
);
11518 check_mips_64(ctx
);
11519 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11523 imm
= ctx
->opcode
& 0xf;
11524 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11525 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11526 imm
= (int16_t) (imm
<< 1) >> 1;
11527 if ((ctx
->opcode
>> 4) & 0x1) {
11528 #if defined(TARGET_MIPS64)
11529 check_mips_64(ctx
);
11530 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11532 generate_exception_end(ctx
, EXCP_RI
);
11535 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11538 case M16_OPC_ADDIU8
:
11539 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11542 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11544 case M16_OPC_SLTIU
:
11545 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11550 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11553 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11556 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11559 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11562 check_insn(ctx
, ISA_MIPS32
);
11564 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11565 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11566 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11567 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11568 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11569 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11570 | (ctx
->opcode
& 0xf)) << 3;
11572 if (ctx
->opcode
& (1 << 7)) {
11573 gen_mips16_save(ctx
, xsregs
, aregs
,
11574 do_ra
, do_s0
, do_s1
,
11577 gen_mips16_restore(ctx
, xsregs
, aregs
,
11578 do_ra
, do_s0
, do_s1
,
11584 generate_exception_end(ctx
, EXCP_RI
);
11589 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11592 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11594 #if defined(TARGET_MIPS64)
11596 check_insn(ctx
, ISA_MIPS3
);
11597 check_mips_64(ctx
);
11598 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11602 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11605 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11608 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11611 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11614 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11617 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11620 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11622 #if defined(TARGET_MIPS64)
11624 check_insn(ctx
, ISA_MIPS3
);
11625 check_mips_64(ctx
);
11626 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11630 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11633 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11636 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11639 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11641 #if defined(TARGET_MIPS64)
11643 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11647 generate_exception_end(ctx
, EXCP_RI
);
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
11663 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11667 int op
, cnvt_op
, op1
, offset
;
11671 op
= (ctx
->opcode
>> 11) & 0x1f;
11672 sa
= (ctx
->opcode
>> 2) & 0x7;
11673 sa
= sa
== 0 ? 8 : sa
;
11674 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11675 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11676 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11677 op1
= offset
= ctx
->opcode
& 0x1f;
11682 case M16_OPC_ADDIUSP
:
11684 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11686 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11689 case M16_OPC_ADDIUPC
:
11690 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11693 offset
= (ctx
->opcode
& 0x7ff) << 1;
11694 offset
= (int16_t)(offset
<< 4) >> 4;
11695 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11696 /* No delay slot, so just process as a normal instruction */
11699 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11700 offset
= (((ctx
->opcode
& 0x1f) << 21)
11701 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11703 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11704 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11708 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11709 ((int8_t)ctx
->opcode
) << 1, 0);
11710 /* No delay slot, so just process as a normal instruction */
11712 case M16_OPC_BNEQZ
:
11713 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11714 ((int8_t)ctx
->opcode
) << 1, 0);
11715 /* No delay slot, so just process as a normal instruction */
11717 case M16_OPC_SHIFT
:
11718 switch (ctx
->opcode
& 0x3) {
11720 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11723 #if defined(TARGET_MIPS64)
11724 check_insn(ctx
, ISA_MIPS3
);
11725 check_mips_64(ctx
);
11726 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11728 generate_exception_end(ctx
, EXCP_RI
);
11732 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11735 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11739 #if defined(TARGET_MIPS64)
11741 check_insn(ctx
, ISA_MIPS3
);
11742 check_mips_64(ctx
);
11743 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11748 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11750 if ((ctx
->opcode
>> 4) & 1) {
11751 #if defined(TARGET_MIPS64)
11752 check_insn(ctx
, ISA_MIPS3
);
11753 check_mips_64(ctx
);
11754 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11756 generate_exception_end(ctx
, EXCP_RI
);
11759 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11763 case M16_OPC_ADDIU8
:
11765 int16_t imm
= (int8_t) ctx
->opcode
;
11767 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11772 int16_t imm
= (uint8_t) ctx
->opcode
;
11773 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11776 case M16_OPC_SLTIU
:
11778 int16_t imm
= (uint8_t) ctx
->opcode
;
11779 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11786 funct
= (ctx
->opcode
>> 8) & 0x7;
11789 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11790 ((int8_t)ctx
->opcode
) << 1, 0);
11793 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11794 ((int8_t)ctx
->opcode
) << 1, 0);
11797 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11800 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11801 ((int8_t)ctx
->opcode
) << 3);
11804 check_insn(ctx
, ISA_MIPS32
);
11806 int do_ra
= ctx
->opcode
& (1 << 6);
11807 int do_s0
= ctx
->opcode
& (1 << 5);
11808 int do_s1
= ctx
->opcode
& (1 << 4);
11809 int framesize
= ctx
->opcode
& 0xf;
11811 if (framesize
== 0) {
11814 framesize
= framesize
<< 3;
11817 if (ctx
->opcode
& (1 << 7)) {
11818 gen_mips16_save(ctx
, 0, 0,
11819 do_ra
, do_s0
, do_s1
, framesize
);
11821 gen_mips16_restore(ctx
, 0, 0,
11822 do_ra
, do_s0
, do_s1
, framesize
);
11828 int rz
= xlat(ctx
->opcode
& 0x7);
11830 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11831 ((ctx
->opcode
>> 5) & 0x7);
11832 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11836 reg32
= ctx
->opcode
& 0x1f;
11837 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11840 generate_exception_end(ctx
, EXCP_RI
);
11847 int16_t imm
= (uint8_t) ctx
->opcode
;
11849 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11854 int16_t imm
= (uint8_t) ctx
->opcode
;
11855 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11858 #if defined(TARGET_MIPS64)
11860 check_insn(ctx
, ISA_MIPS3
);
11861 check_mips_64(ctx
);
11862 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11866 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11869 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11872 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11875 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11878 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11881 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11884 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11886 #if defined (TARGET_MIPS64)
11888 check_insn(ctx
, ISA_MIPS3
);
11889 check_mips_64(ctx
);
11890 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11894 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11897 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11900 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11903 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11907 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11910 switch (ctx
->opcode
& 0x3) {
11912 mips32_op
= OPC_ADDU
;
11915 mips32_op
= OPC_SUBU
;
11917 #if defined(TARGET_MIPS64)
11919 mips32_op
= OPC_DADDU
;
11920 check_insn(ctx
, ISA_MIPS3
);
11921 check_mips_64(ctx
);
11924 mips32_op
= OPC_DSUBU
;
11925 check_insn(ctx
, ISA_MIPS3
);
11926 check_mips_64(ctx
);
11930 generate_exception_end(ctx
, EXCP_RI
);
11934 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11943 int nd
= (ctx
->opcode
>> 7) & 0x1;
11944 int link
= (ctx
->opcode
>> 6) & 0x1;
11945 int ra
= (ctx
->opcode
>> 5) & 0x1;
11948 check_insn(ctx
, ISA_MIPS32
);
11957 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11962 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11963 gen_helper_do_semihosting(cpu_env
);
11965 /* XXX: not clear which exception should be raised
11966 * when in debug mode...
11968 check_insn(ctx
, ISA_MIPS32
);
11969 generate_exception_end(ctx
, EXCP_DBp
);
11973 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11976 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11979 generate_exception_end(ctx
, EXCP_BREAK
);
11982 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11985 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11988 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11990 #if defined (TARGET_MIPS64)
11992 check_insn(ctx
, ISA_MIPS3
);
11993 check_mips_64(ctx
);
11994 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11998 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
12001 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
12004 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
12007 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
12010 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
12013 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
12016 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
12019 check_insn(ctx
, ISA_MIPS32
);
12021 case RR_RY_CNVT_ZEB
:
12022 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12024 case RR_RY_CNVT_ZEH
:
12025 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12027 case RR_RY_CNVT_SEB
:
12028 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12030 case RR_RY_CNVT_SEH
:
12031 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12033 #if defined (TARGET_MIPS64)
12034 case RR_RY_CNVT_ZEW
:
12035 check_insn(ctx
, ISA_MIPS64
);
12036 check_mips_64(ctx
);
12037 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12039 case RR_RY_CNVT_SEW
:
12040 check_insn(ctx
, ISA_MIPS64
);
12041 check_mips_64(ctx
);
12042 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12046 generate_exception_end(ctx
, EXCP_RI
);
12051 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
12053 #if defined (TARGET_MIPS64)
12055 check_insn(ctx
, ISA_MIPS3
);
12056 check_mips_64(ctx
);
12057 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
12060 check_insn(ctx
, ISA_MIPS3
);
12061 check_mips_64(ctx
);
12062 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
12065 check_insn(ctx
, ISA_MIPS3
);
12066 check_mips_64(ctx
);
12067 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
12070 check_insn(ctx
, ISA_MIPS3
);
12071 check_mips_64(ctx
);
12072 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
12076 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
12079 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
12082 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
12085 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
12087 #if defined (TARGET_MIPS64)
12089 check_insn(ctx
, ISA_MIPS3
);
12090 check_mips_64(ctx
);
12091 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
12094 check_insn(ctx
, ISA_MIPS3
);
12095 check_mips_64(ctx
);
12096 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
12099 check_insn(ctx
, ISA_MIPS3
);
12100 check_mips_64(ctx
);
12101 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
12104 check_insn(ctx
, ISA_MIPS3
);
12105 check_mips_64(ctx
);
12106 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12110 generate_exception_end(ctx
, EXCP_RI
);
12114 case M16_OPC_EXTEND
:
12115 decode_extended_mips16_opc(env
, ctx
);
12118 #if defined(TARGET_MIPS64)
12120 funct
= (ctx
->opcode
>> 8) & 0x7;
12121 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12125 generate_exception_end(ctx
, EXCP_RI
);
12132 /* microMIPS extension to MIPS32/MIPS64 */
12135 * microMIPS32/microMIPS64 major opcodes
12137 * 1. MIPS Architecture for Programmers Volume II-B:
12138 * The microMIPS32 Instruction Set (Revision 3.05)
12140 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12142 * 2. MIPS Architecture For Programmers Volume II-A:
12143 * The MIPS64 Instruction Set (Revision 3.51)
12173 POOL32S
= 0x16, /* MIPS64 */
12174 DADDIU32
= 0x17, /* MIPS64 */
12203 /* 0x29 is reserved */
12216 /* 0x31 is reserved */
12229 SD32
= 0x36, /* MIPS64 */
12230 LD32
= 0x37, /* MIPS64 */
12232 /* 0x39 is reserved */
12248 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12258 /* POOL32A encoding of minor opcode field */
12261 /* These opcodes are distinguished only by bits 9..6; those bits are
12262 * what are recorded below. */
12299 /* The following can be distinguished by their lower 6 bits. */
12309 /* POOL32AXF encoding of minor opcode field extension */
12312 * 1. MIPS Architecture for Programmers Volume II-B:
12313 * The microMIPS32 Instruction Set (Revision 3.05)
12315 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12317 * 2. MIPS Architecture for Programmers VolumeIV-e:
12318 * The MIPS DSP Application-Specific Extension
12319 * to the microMIPS32 Architecture (Revision 2.34)
12321 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12336 /* begin of microMIPS32 DSP */
12338 /* bits 13..12 for 0x01 */
12344 /* bits 13..12 for 0x2a */
12350 /* bits 13..12 for 0x32 */
12354 /* end of microMIPS32 DSP */
12356 /* bits 15..12 for 0x2c */
12373 /* bits 15..12 for 0x34 */
12381 /* bits 15..12 for 0x3c */
12383 JR
= 0x0, /* alias */
12391 /* bits 15..12 for 0x05 */
12395 /* bits 15..12 for 0x0d */
12407 /* bits 15..12 for 0x15 */
12413 /* bits 15..12 for 0x1d */
12417 /* bits 15..12 for 0x2d */
12422 /* bits 15..12 for 0x35 */
12429 /* POOL32B encoding of minor opcode field (bits 15..12) */
12445 /* POOL32C encoding of minor opcode field (bits 15..12) */
12453 /* 0xa is reserved */
12460 /* 0x6 is reserved */
12466 /* POOL32F encoding of minor opcode field (bits 5..0) */
12469 /* These are the bit 7..6 values */
12478 /* These are the bit 8..6 values */
12503 MOVZ_FMT_05
= 0x05,
12537 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12544 /* POOL32Fxf encoding of minor opcode extension field */
12582 /* POOL32I encoding of minor opcode field (bits 25..21) */
12612 /* These overlap and are distinguished by bit16 of the instruction */
12621 /* POOL16A encoding of minor opcode field */
12628 /* POOL16B encoding of minor opcode field */
12635 /* POOL16C encoding of minor opcode field */
12655 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12675 /* POOL16D encoding of minor opcode field */
12682 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions. */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
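
/*
 * Worked example (added commentary, not from the original source):
 * ZIMM(op, 0, 4) reduces to (op & 0xf), since (~0U) >> (32 - 4) == 0xf.
 * SIMM additionally shifts the extracted field up to bit 31 and back with
 * an arithmetic shift, so SIMM(op, 1, 4) yields values in [-8, 7] for the
 * 4-bit field at bits 4..1 (as used by gen_addius5() below).
 */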
static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}

static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}

static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}

static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}

static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
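
/*
 * Example (added commentary): ANDI16 does not carry a raw immediate; the
 * 4-bit encoded field indexes decoded_imm[], so an encoded value of 0
 * produces "andi rd, rs, 128" and an encoded value of 13 produces
 * "andi rd, rs, 255".
 */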
12772 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12773 int base
, int16_t offset
)
12778 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12779 generate_exception_end(ctx
, EXCP_RI
);
12783 t0
= tcg_temp_new();
12785 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12787 t1
= tcg_const_tl(reglist
);
12788 t2
= tcg_const_i32(ctx
->mem_idx
);
12790 save_cpu_state(ctx
, 1);
12793 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12796 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12798 #ifdef TARGET_MIPS64
12800 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12803 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12809 tcg_temp_free_i32(t2
);
12813 static void gen_pool16c_insn(DisasContext
*ctx
)
12815 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12816 int rs
= mmreg(ctx
->opcode
& 0x7);
12818 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12823 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12829 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12835 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12841 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12848 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12849 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12851 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12860 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12861 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12863 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12870 int reg
= ctx
->opcode
& 0x1f;
12872 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12878 int reg
= ctx
->opcode
& 0x1f;
12879 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12880 /* Let normal delay slot handling in our caller take us
12881 to the branch target. */
12886 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12887 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12891 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12892 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12896 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12900 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12903 generate_exception_end(ctx
, EXCP_BREAK
);
12906 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12907 gen_helper_do_semihosting(cpu_env
);
12909 /* XXX: not clear which exception should be raised
12910 * when in debug mode...
12912 check_insn(ctx
, ISA_MIPS32
);
12913 generate_exception_end(ctx
, EXCP_DBp
);
12916 case JRADDIUSP
+ 0:
12917 case JRADDIUSP
+ 1:
12919 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12920 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12921 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12922 /* Let normal delay slot handling in our caller take us
12923 to the branch target. */
12927 generate_exception_end(ctx
, EXCP_RI
);
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
}
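
/*
 * Example (added commentary): MOVEP moves a register pair in one 16-bit
 * instruction.  With enc_dest == 0 the destination pair is ($5, $6) per
 * rd_enc[]/re_enc[] above, and an enc_rs or enc_rt of 0 selects the
 * constant zero rather than a source GPR, hence the movi special case.
 */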
12955 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12957 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12958 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12960 switch (ctx
->opcode
& 0xf) {
12962 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12965 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12969 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12970 int offset
= extract32(ctx
->opcode
, 4, 4);
12971 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12974 case R6_JRC16
: /* JRCADDIUSP */
12975 if ((ctx
->opcode
>> 4) & 1) {
12977 int imm
= extract32(ctx
->opcode
, 5, 5);
12978 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12979 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12982 int rs
= extract32(ctx
->opcode
, 5, 5);
12983 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12986 case MOVEP
... MOVEP_07
:
12987 case MOVEP_0C
... MOVEP_0F
:
12989 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12990 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12991 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12992 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12996 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12999 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
13003 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13004 int offset
= extract32(ctx
->opcode
, 4, 4);
13005 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
13008 case JALRC16
: /* BREAK16, SDBBP16 */
13009 switch (ctx
->opcode
& 0x3f) {
13011 case JALRC16
+ 0x20:
13013 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
13018 generate_exception(ctx
, EXCP_BREAK
);
13022 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
13023 gen_helper_do_semihosting(cpu_env
);
13025 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13026 generate_exception(ctx
, EXCP_RI
);
13028 generate_exception(ctx
, EXCP_DBp
);
13035 generate_exception(ctx
, EXCP_RI
);
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    if (index != 0) {
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);
    }

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
13060 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
13061 int base
, int16_t offset
)
13065 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
13066 generate_exception_end(ctx
, EXCP_RI
);
13070 t0
= tcg_temp_new();
13071 t1
= tcg_temp_new();
13073 gen_base_offset_addr(ctx
, t0
, base
, offset
);
13078 generate_exception_end(ctx
, EXCP_RI
);
13081 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13082 gen_store_gpr(t1
, rd
);
13083 tcg_gen_movi_tl(t1
, 4);
13084 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13085 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13086 gen_store_gpr(t1
, rd
+1);
13089 gen_load_gpr(t1
, rd
);
13090 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13091 tcg_gen_movi_tl(t1
, 4);
13092 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13093 gen_load_gpr(t1
, rd
+1);
13094 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13096 #ifdef TARGET_MIPS64
13099 generate_exception_end(ctx
, EXCP_RI
);
13102 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13103 gen_store_gpr(t1
, rd
);
13104 tcg_gen_movi_tl(t1
, 8);
13105 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13106 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13107 gen_store_gpr(t1
, rd
+1);
13110 gen_load_gpr(t1
, rd
);
13111 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13112 tcg_gen_movi_tl(t1
, 8);
13113 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13114 gen_load_gpr(t1
, rd
+1);
13115 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
static void gen_sync(int stype)
{
    TCGBar tcg_mo = TCG_BAR_SC;

    switch (stype) {
    case 0x4: /* SYNC_WMB */
        tcg_mo |= TCG_MO_ST_ST;
        break;
    case 0x10: /* SYNC_MB */
        tcg_mo |= TCG_MO_ALL;
        break;
    case 0x11: /* SYNC_ACQUIRE */
        tcg_mo |= TCG_MO_LD_LD | TCG_MO_LD_ST;
        break;
    case 0x12: /* SYNC_RELEASE */
        tcg_mo |= TCG_MO_ST_ST | TCG_MO_LD_ST;
        break;
    case 0x13: /* SYNC_RMB */
        tcg_mo |= TCG_MO_LD_LD;
        break;
    default:
        tcg_mo |= TCG_MO_ALL;
        break;
    }

    tcg_gen_mb(tcg_mo);
}
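
/*
 * Summary (added commentary, not from the original source): gen_sync()
 * maps the SYNC stype field onto TCG memory barriers, always requesting
 * sequential consistency (TCG_BAR_SC) and restricting the ordered access
 * kinds where the architecture allows it:
 *   0x04 SYNC_WMB     -> store/store
 *   0x10 SYNC_MB      -> all
 *   0x11 SYNC_ACQUIRE -> load/load + load/store
 *   0x12 SYNC_RELEASE -> store/store + load/store
 *   0x13 SYNC_RMB     -> load/load
 *   other             -> all (plain SYNC and unrecognised stypes)
 */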
13151 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13153 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13154 int minor
= (ctx
->opcode
>> 12) & 0xf;
13155 uint32_t mips32_op
;
13157 switch (extension
) {
13159 mips32_op
= OPC_TEQ
;
13162 mips32_op
= OPC_TGE
;
13165 mips32_op
= OPC_TGEU
;
13168 mips32_op
= OPC_TLT
;
13171 mips32_op
= OPC_TLTU
;
13174 mips32_op
= OPC_TNE
;
13176 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13178 #ifndef CONFIG_USER_ONLY
13181 check_cp0_enabled(ctx
);
13183 /* Treat as NOP. */
13186 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13190 check_cp0_enabled(ctx
);
13192 TCGv t0
= tcg_temp_new();
13194 gen_load_gpr(t0
, rt
);
13195 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13201 switch (minor
& 3) {
13203 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13206 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13209 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13212 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13215 goto pool32axf_invalid
;
13219 switch (minor
& 3) {
13221 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13224 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13227 goto pool32axf_invalid
;
13233 check_insn(ctx
, ISA_MIPS32R6
);
13234 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13237 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13240 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13243 mips32_op
= OPC_CLO
;
13246 mips32_op
= OPC_CLZ
;
13248 check_insn(ctx
, ISA_MIPS32
);
13249 gen_cl(ctx
, mips32_op
, rt
, rs
);
13252 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13253 gen_rdhwr(ctx
, rt
, rs
, 0);
13256 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13259 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13260 mips32_op
= OPC_MULT
;
13263 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13264 mips32_op
= OPC_MULTU
;
13267 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13268 mips32_op
= OPC_DIV
;
13271 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13272 mips32_op
= OPC_DIVU
;
13275 check_insn(ctx
, ISA_MIPS32
);
13276 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13279 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13280 mips32_op
= OPC_MADD
;
13283 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13284 mips32_op
= OPC_MADDU
;
13287 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13288 mips32_op
= OPC_MSUB
;
13291 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13292 mips32_op
= OPC_MSUBU
;
13294 check_insn(ctx
, ISA_MIPS32
);
13295 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13298 goto pool32axf_invalid
;
13309 generate_exception_err(ctx
, EXCP_CpU
, 2);
13312 goto pool32axf_invalid
;
13317 case JALR
: /* JALRC */
13318 case JALR_HB
: /* JALRC_HB */
13319 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13320 /* JALRC, JALRC_HB */
13321 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13323 /* JALR, JALR_HB */
13324 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13325 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13330 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13331 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13332 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13335 goto pool32axf_invalid
;
13341 check_cp0_enabled(ctx
);
13342 check_insn(ctx
, ISA_MIPS32R2
);
13343 gen_load_srsgpr(rs
, rt
);
13346 check_cp0_enabled(ctx
);
13347 check_insn(ctx
, ISA_MIPS32R2
);
13348 gen_store_srsgpr(rs
, rt
);
13351 goto pool32axf_invalid
;
13354 #ifndef CONFIG_USER_ONLY
13358 mips32_op
= OPC_TLBP
;
13361 mips32_op
= OPC_TLBR
;
13364 mips32_op
= OPC_TLBWI
;
13367 mips32_op
= OPC_TLBWR
;
13370 mips32_op
= OPC_TLBINV
;
13373 mips32_op
= OPC_TLBINVF
;
13376 mips32_op
= OPC_WAIT
;
13379 mips32_op
= OPC_DERET
;
13382 mips32_op
= OPC_ERET
;
13384 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13387 goto pool32axf_invalid
;
13393 check_cp0_enabled(ctx
);
13395 TCGv t0
= tcg_temp_new();
13397 save_cpu_state(ctx
, 1);
13398 gen_helper_di(t0
, cpu_env
);
13399 gen_store_gpr(t0
, rs
);
13400 /* Stop translation as we may have switched the execution mode */
13401 ctx
->bstate
= BS_STOP
;
13406 check_cp0_enabled(ctx
);
13408 TCGv t0
= tcg_temp_new();
13410 save_cpu_state(ctx
, 1);
13411 gen_helper_ei(t0
, cpu_env
);
13412 gen_store_gpr(t0
, rs
);
13413 /* Stop translation as we may have switched the execution mode */
13414 ctx
->bstate
= BS_STOP
;
13419 goto pool32axf_invalid
;
13426 gen_sync(extract32(ctx
->opcode
, 16, 5));
13429 generate_exception_end(ctx
, EXCP_SYSCALL
);
13432 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13433 gen_helper_do_semihosting(cpu_env
);
13435 check_insn(ctx
, ISA_MIPS32
);
13436 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13437 generate_exception_end(ctx
, EXCP_RI
);
13439 generate_exception_end(ctx
, EXCP_DBp
);
13444 goto pool32axf_invalid
;
13448 switch (minor
& 3) {
13450 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13453 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13456 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13459 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13462 goto pool32axf_invalid
;
13466 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13469 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13472 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13475 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13478 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13481 goto pool32axf_invalid
;
13486 MIPS_INVAL("pool32axf");
13487 generate_exception_end(ctx
, EXCP_RI
);
13492 /* Values for microMIPS fmt field. Variable-width, depending on which
13493 formats the instruction supports. */
13512 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13514 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13515 uint32_t mips32_op
;
13517 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13518 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13519 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
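
/*
 * Example (added commentary): the switch below matches on the 10-bit
 * "extension" field, so e.g. FLOAT_2BIT_FMT(CVT_D, FMT_SWL_W) builds the
 * value (FMT_SWL_W << 7) | CVT_D that the CVT.D.W encoding decodes to,
 * while FLOAT_1BIT_FMT(MFC1, 0) is simply the MFC1 minor opcode itself.
 */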
13521 switch (extension
) {
13522 case FLOAT_1BIT_FMT(CFC1
, 0):
13523 mips32_op
= OPC_CFC1
;
13525 case FLOAT_1BIT_FMT(CTC1
, 0):
13526 mips32_op
= OPC_CTC1
;
13528 case FLOAT_1BIT_FMT(MFC1
, 0):
13529 mips32_op
= OPC_MFC1
;
13531 case FLOAT_1BIT_FMT(MTC1
, 0):
13532 mips32_op
= OPC_MTC1
;
13534 case FLOAT_1BIT_FMT(MFHC1
, 0):
13535 mips32_op
= OPC_MFHC1
;
13537 case FLOAT_1BIT_FMT(MTHC1
, 0):
13538 mips32_op
= OPC_MTHC1
;
13540 gen_cp1(ctx
, mips32_op
, rt
, rs
);
        /* Reciprocal square root */
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_S):
        mips32_op = OPC_RSQRT_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D):
        mips32_op = OPC_RSQRT_D;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_S):
        mips32_op = OPC_SQRT_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_D):
        mips32_op = OPC_SQRT_D;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_S):
        mips32_op = OPC_RECIP_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_D):
        mips32_op = OPC_RECIP_D;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_S):
        mips32_op = OPC_FLOOR_L_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_D):
        mips32_op = OPC_FLOOR_L_D;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_S):
        mips32_op = OPC_FLOOR_W_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_D):
        mips32_op = OPC_FLOOR_W_D;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_S):
        mips32_op = OPC_CEIL_L_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_D):
        mips32_op = OPC_CEIL_L_D;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_S):
        mips32_op = OPC_CEIL_W_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_D):
        mips32_op = OPC_CEIL_W_D;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_S):
        mips32_op = OPC_TRUNC_L_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_D):
        mips32_op = OPC_TRUNC_L_D;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_S):
        mips32_op = OPC_TRUNC_W_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_D):
        mips32_op = OPC_TRUNC_W_D;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_S):
        mips32_op = OPC_ROUND_L_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_D):
        mips32_op = OPC_ROUND_L_D;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_S):
        mips32_op = OPC_ROUND_W_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_D):
        mips32_op = OPC_ROUND_W_D;
        goto do_unaryfp;

        /* Integer to floating-point conversion */
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_S):
        mips32_op = OPC_CVT_L_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_D):
        mips32_op = OPC_CVT_L_D;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_S):
        mips32_op = OPC_CVT_W_S;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_D):
        mips32_op = OPC_CVT_W_D;
        goto do_unaryfp;

        /* Paired-foo conversions */
    case FLOAT_1BIT_FMT(CVT_S_PL, 0):
        mips32_op = OPC_CVT_S_PL;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(CVT_S_PU, 0):
        mips32_op = OPC_CVT_S_PU;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(CVT_PW_PS, 0):
        mips32_op = OPC_CVT_PW_PS;
        goto do_unaryfp;
    case FLOAT_1BIT_FMT(CVT_PS_PW, 0):
        mips32_op = OPC_CVT_PS_PW;
        goto do_unaryfp;

        /* Floating-point moves */
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_S):
        mips32_op = OPC_MOV_S;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_D):
        mips32_op = OPC_MOV_D;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_PS):
        mips32_op = OPC_MOV_PS;
        goto do_unaryfp;

        /* Absolute value */
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_S):
        mips32_op = OPC_ABS_S;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_D):
        mips32_op = OPC_ABS_D;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_PS):
        mips32_op = OPC_ABS_PS;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_S):
        mips32_op = OPC_NEG_S;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_D):
        mips32_op = OPC_NEG_D;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_PS):
        mips32_op = OPC_NEG_PS;
        goto do_unaryfp;

        /* Reciprocal square root step */
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RSQRT1_S;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RSQRT1_D;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RSQRT1_PS;
        goto do_unaryfp;
        /* Reciprocal step */
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RECIP1_S;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RECIP1_D;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RECIP1_PS;
        goto do_unaryfp;
        /* Conversions to double */
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_S):
        mips32_op = OPC_CVT_D_S;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_W):
        mips32_op = OPC_CVT_D_W;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_L):
        mips32_op = OPC_CVT_D_L;
        goto do_unaryfp;

        /* Conversions to single */
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_D):
        mips32_op = OPC_CVT_S_D;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_W):
        mips32_op = OPC_CVT_S_W;
        goto do_unaryfp;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_L):
        mips32_op = OPC_CVT_S_L;
    do_unaryfp:
        gen_farith(ctx, mips32_op, -1, rs, rt, 0);
        break;
        /* Conditional moves on floating-point codes */
    case COND_FLOAT_MOV(MOVT, 0):
    case COND_FLOAT_MOV(MOVT, 1):
    case COND_FLOAT_MOV(MOVT, 2):
    case COND_FLOAT_MOV(MOVT, 3):
    case COND_FLOAT_MOV(MOVT, 4):
    case COND_FLOAT_MOV(MOVT, 5):
    case COND_FLOAT_MOV(MOVT, 6):
    case COND_FLOAT_MOV(MOVT, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
        break;
    case COND_FLOAT_MOV(MOVF, 0):
    case COND_FLOAT_MOV(MOVF, 1):
    case COND_FLOAT_MOV(MOVF, 2):
    case COND_FLOAT_MOV(MOVF, 3):
    case COND_FLOAT_MOV(MOVF, 4):
    case COND_FLOAT_MOV(MOVF, 5):
    case COND_FLOAT_MOV(MOVF, 6):
    case COND_FLOAT_MOV(MOVF, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);
        break;
    default:
        MIPS_INVAL("pool32fxf");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
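/*
 * Decode the second halfword of a 32-bit microMIPS instruction, merge it
 * with the already-fetched first halfword, extract the common rt/rs/rd/rr
 * register fields and the signed 16-bit immediate, and dispatch on the
 * major opcode in bits 31..26 of the combined opcode.
 */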
static void decode_micromips32_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int32_t offset;
    uint16_t insn;
    int rt, rs, rd, rr;
    int16_t imm;
    uint32_t op, minor, mips32_op;
    uint32_t cond, fmt, cc;

    insn = cpu_lduw_code(env, ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;
    switch (op) {
    case POOL32A:
        minor = ctx->opcode & 0x3f;
13784 minor
= (ctx
->opcode
>> 6) & 0xf;
13787 mips32_op
= OPC_SLL
;
13790 mips32_op
= OPC_SRA
;
13793 mips32_op
= OPC_SRL
;
13796 mips32_op
= OPC_ROTR
;
13798 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13801 check_insn(ctx
, ISA_MIPS32R6
);
13802 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13805 check_insn(ctx
, ISA_MIPS32R6
);
13806 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13809 check_insn(ctx
, ISA_MIPS32R6
);
13810 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13813 goto pool32a_invalid
;
13817 minor
= (ctx
->opcode
>> 6) & 0xf;
13821 mips32_op
= OPC_ADD
;
13824 mips32_op
= OPC_ADDU
;
13827 mips32_op
= OPC_SUB
;
13830 mips32_op
= OPC_SUBU
;
13833 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13834 mips32_op
= OPC_MUL
;
13836 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13840 mips32_op
= OPC_SLLV
;
13843 mips32_op
= OPC_SRLV
;
13846 mips32_op
= OPC_SRAV
;
13849 mips32_op
= OPC_ROTRV
;
13851 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13853 /* Logical operations */
13855 mips32_op
= OPC_AND
;
13858 mips32_op
= OPC_OR
;
13861 mips32_op
= OPC_NOR
;
13864 mips32_op
= OPC_XOR
;
13866 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13868 /* Set less than */
13870 mips32_op
= OPC_SLT
;
13873 mips32_op
= OPC_SLTU
;
13875 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13878 goto pool32a_invalid
;
13882 minor
= (ctx
->opcode
>> 6) & 0xf;
13884 /* Conditional moves */
13885 case MOVN
: /* MUL */
13886 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13888 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13891 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13894 case MOVZ
: /* MUH */
13895 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13897 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13900 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13904 check_insn(ctx
, ISA_MIPS32R6
);
13905 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13908 check_insn(ctx
, ISA_MIPS32R6
);
13909 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13911 case LWXS
: /* DIV */
13912 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13914 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13917 gen_ldxs(ctx
, rs
, rt
, rd
);
13921 check_insn(ctx
, ISA_MIPS32R6
);
13922 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13925 check_insn(ctx
, ISA_MIPS32R6
);
13926 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13929 check_insn(ctx
, ISA_MIPS32R6
);
13930 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13933 goto pool32a_invalid
;
13937 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13940 check_insn(ctx
, ISA_MIPS32R6
);
13941 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13942 extract32(ctx
->opcode
, 9, 2));
13945 check_insn(ctx
, ISA_MIPS32R6
);
13946 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13947 extract32(ctx
->opcode
, 9, 2));
13950 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
13953 gen_pool32axf(env
, ctx
, rt
, rs
);
13956 generate_exception_end(ctx
, EXCP_BREAK
);
13959 check_insn(ctx
, ISA_MIPS32R6
);
13960 generate_exception_end(ctx
, EXCP_RI
);
13964 MIPS_INVAL("pool32a");
13965 generate_exception_end(ctx
, EXCP_RI
);
13970 minor
= (ctx
->opcode
>> 12) & 0xf;
13973 check_cp0_enabled(ctx
);
13974 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
13975 gen_cache_operation(ctx
, rt
, rs
, imm
);
13980 /* COP2: Not implemented. */
13981 generate_exception_err(ctx
, EXCP_CpU
, 2);
13983 #ifdef TARGET_MIPS64
13986 check_insn(ctx
, ISA_MIPS3
);
13987 check_mips_64(ctx
);
13992 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13994 #ifdef TARGET_MIPS64
13997 check_insn(ctx
, ISA_MIPS3
);
13998 check_mips_64(ctx
);
14003 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14006 MIPS_INVAL("pool32b");
14007 generate_exception_end(ctx
, EXCP_RI
);
14012 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
14013 minor
= ctx
->opcode
& 0x3f;
14014 check_cp1_enabled(ctx
);
14017 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14018 mips32_op
= OPC_ALNV_PS
;
14021 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14022 mips32_op
= OPC_MADD_S
;
14025 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14026 mips32_op
= OPC_MADD_D
;
14029 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14030 mips32_op
= OPC_MADD_PS
;
14033 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14034 mips32_op
= OPC_MSUB_S
;
14037 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14038 mips32_op
= OPC_MSUB_D
;
14041 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14042 mips32_op
= OPC_MSUB_PS
;
14045 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14046 mips32_op
= OPC_NMADD_S
;
14049 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14050 mips32_op
= OPC_NMADD_D
;
14053 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14054 mips32_op
= OPC_NMADD_PS
;
14057 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14058 mips32_op
= OPC_NMSUB_S
;
14061 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14062 mips32_op
= OPC_NMSUB_D
;
14065 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14066 mips32_op
= OPC_NMSUB_PS
;
14068 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
14070 case CABS_COND_FMT
:
14071 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14072 cond
= (ctx
->opcode
>> 6) & 0xf;
14073 cc
= (ctx
->opcode
>> 13) & 0x7;
14074 fmt
= (ctx
->opcode
>> 10) & 0x3;
14077 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
14080 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
14083 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
14086 goto pool32f_invalid
;
14090 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14091 cond
= (ctx
->opcode
>> 6) & 0xf;
14092 cc
= (ctx
->opcode
>> 13) & 0x7;
14093 fmt
= (ctx
->opcode
>> 10) & 0x3;
14096 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
14099 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
14102 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
14105 goto pool32f_invalid
;
14109 check_insn(ctx
, ISA_MIPS32R6
);
14110 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14113 check_insn(ctx
, ISA_MIPS32R6
);
14114 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14117 gen_pool32fxf(ctx
, rt
, rs
);
14121 switch ((ctx
->opcode
>> 6) & 0x7) {
14123 mips32_op
= OPC_PLL_PS
;
14126 mips32_op
= OPC_PLU_PS
;
14129 mips32_op
= OPC_PUL_PS
;
14132 mips32_op
= OPC_PUU_PS
;
14135 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14136 mips32_op
= OPC_CVT_PS_S
;
14138 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14141 goto pool32f_invalid
;
14145 check_insn(ctx
, ISA_MIPS32R6
);
14146 switch ((ctx
->opcode
>> 9) & 0x3) {
14148 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14151 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14154 goto pool32f_invalid
;
14159 switch ((ctx
->opcode
>> 6) & 0x7) {
14161 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14162 mips32_op
= OPC_LWXC1
;
14165 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14166 mips32_op
= OPC_SWXC1
;
14169 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14170 mips32_op
= OPC_LDXC1
;
14173 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14174 mips32_op
= OPC_SDXC1
;
14177 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14178 mips32_op
= OPC_LUXC1
;
14181 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14182 mips32_op
= OPC_SUXC1
;
14184 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14187 goto pool32f_invalid
;
14191 check_insn(ctx
, ISA_MIPS32R6
);
14192 switch ((ctx
->opcode
>> 9) & 0x3) {
14194 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14197 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14200 goto pool32f_invalid
;
14205 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14206 fmt
= (ctx
->opcode
>> 9) & 0x3;
14207 switch ((ctx
->opcode
>> 6) & 0x7) {
14211 mips32_op
= OPC_RSQRT2_S
;
14214 mips32_op
= OPC_RSQRT2_D
;
14217 mips32_op
= OPC_RSQRT2_PS
;
14220 goto pool32f_invalid
;
14226 mips32_op
= OPC_RECIP2_S
;
14229 mips32_op
= OPC_RECIP2_D
;
14232 mips32_op
= OPC_RECIP2_PS
;
14235 goto pool32f_invalid
;
14239 mips32_op
= OPC_ADDR_PS
;
14242 mips32_op
= OPC_MULR_PS
;
14244 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14247 goto pool32f_invalid
;
14251 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14252 cc
= (ctx
->opcode
>> 13) & 0x7;
14253 fmt
= (ctx
->opcode
>> 9) & 0x3;
14254 switch ((ctx
->opcode
>> 6) & 0x7) {
14255 case MOVF_FMT
: /* RINT_FMT */
14256 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14260 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14263 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14266 goto pool32f_invalid
;
14272 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14275 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14279 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14282 goto pool32f_invalid
;
14286 case MOVT_FMT
: /* CLASS_FMT */
14287 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14291 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14294 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14297 goto pool32f_invalid
;
14303 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14306 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14310 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14313 goto pool32f_invalid
;
14318 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14321 goto pool32f_invalid
;
#define FINSN_3ARG_SDPS(prfx)                           \
    switch ((ctx->opcode >> 8) & 0x3) {                 \
    case FMT_SDPS_S:                                    \
        mips32_op = OPC_##prfx##_S;                     \
        break;                                          \
    case FMT_SDPS_D:                                    \
        mips32_op = OPC_##prfx##_D;                     \
        break;                                          \
    case FMT_SDPS_PS:                                   \
        check_ps(ctx);                                  \
        mips32_op = OPC_##prfx##_PS;                    \
        break;                                          \
    default:                                            \
        goto pool32f_invalid;                           \
    }
14340 check_insn(ctx
, ISA_MIPS32R6
);
14341 switch ((ctx
->opcode
>> 9) & 0x3) {
14343 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14346 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14349 goto pool32f_invalid
;
14353 check_insn(ctx
, ISA_MIPS32R6
);
14354 switch ((ctx
->opcode
>> 9) & 0x3) {
14356 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14359 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14362 goto pool32f_invalid
;
14366 /* regular FP ops */
14367 switch ((ctx
->opcode
>> 6) & 0x3) {
14369 FINSN_3ARG_SDPS(ADD
);
14372 FINSN_3ARG_SDPS(SUB
);
14375 FINSN_3ARG_SDPS(MUL
);
14378 fmt
= (ctx
->opcode
>> 8) & 0x3;
14380 mips32_op
= OPC_DIV_D
;
14381 } else if (fmt
== 0) {
14382 mips32_op
= OPC_DIV_S
;
14384 goto pool32f_invalid
;
14388 goto pool32f_invalid
;
14393 switch ((ctx
->opcode
>> 6) & 0x7) {
14394 case MOVN_FMT
: /* SELNEZ_FMT */
14395 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14397 switch ((ctx
->opcode
>> 9) & 0x3) {
14399 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14402 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14405 goto pool32f_invalid
;
14409 FINSN_3ARG_SDPS(MOVN
);
14413 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14414 FINSN_3ARG_SDPS(MOVN
);
14416 case MOVZ_FMT
: /* SELEQZ_FMT */
14417 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14419 switch ((ctx
->opcode
>> 9) & 0x3) {
14421 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14424 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14427 goto pool32f_invalid
;
14431 FINSN_3ARG_SDPS(MOVZ
);
14435 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14436 FINSN_3ARG_SDPS(MOVZ
);
14439 check_insn(ctx
, ISA_MIPS32R6
);
14440 switch ((ctx
->opcode
>> 9) & 0x3) {
14442 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14445 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14448 goto pool32f_invalid
;
14452 check_insn(ctx
, ISA_MIPS32R6
);
14453 switch ((ctx
->opcode
>> 9) & 0x3) {
14455 mips32_op
= OPC_MADDF_S
;
14458 mips32_op
= OPC_MADDF_D
;
14461 goto pool32f_invalid
;
14465 check_insn(ctx
, ISA_MIPS32R6
);
14466 switch ((ctx
->opcode
>> 9) & 0x3) {
14468 mips32_op
= OPC_MSUBF_S
;
14471 mips32_op
= OPC_MSUBF_D
;
14474 goto pool32f_invalid
;
14478 goto pool32f_invalid
;
14482 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14486 MIPS_INVAL("pool32f");
14487 generate_exception_end(ctx
, EXCP_RI
);
14491 generate_exception_err(ctx
, EXCP_CpU
, 1);
14495 minor
= (ctx
->opcode
>> 21) & 0x1f;
14498 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14499 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14502 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14503 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14504 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14507 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14508 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14509 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14512 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14513 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14516 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14517 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14518 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14521 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14522 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14523 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14526 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14527 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14530 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14531 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14535 case TLTI
: /* BC1EQZC */
14536 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14538 check_cp1_enabled(ctx
);
14539 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14542 mips32_op
= OPC_TLTI
;
14546 case TGEI
: /* BC1NEZC */
14547 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14549 check_cp1_enabled(ctx
);
14550 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14553 mips32_op
= OPC_TGEI
;
14558 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14559 mips32_op
= OPC_TLTIU
;
14562 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14563 mips32_op
= OPC_TGEIU
;
14565 case TNEI
: /* SYNCI */
14566 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
                /* Break the TB to be able to sync copied instructions
                   immediately */
                ctx->bstate = BS_STOP;
14573 mips32_op
= OPC_TNEI
;
14578 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14579 mips32_op
= OPC_TEQI
;
14581 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14586 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14587 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14588 4, rs
, 0, imm
<< 1, 0);
            /* Compact branches don't have a delay slot, so just let
               the normal delay slot handling take us to the branch
               target. */
14594 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14595 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14598 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
14605 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14606 /* COP2: Not implemented. */
14607 generate_exception_err(ctx
, EXCP_CpU
, 2);
14610 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14611 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14614 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14615 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14618 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14619 mips32_op
= OPC_BC1FANY4
;
14622 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14623 mips32_op
= OPC_BC1TANY4
;
14626 check_insn(ctx
, ASE_MIPS3D
);
14629 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14630 check_cp1_enabled(ctx
);
14631 gen_compute_branch1(ctx
, mips32_op
,
14632 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14634 generate_exception_err(ctx
, EXCP_CpU
, 1);
14639 /* MIPS DSP: not implemented */
14642 MIPS_INVAL("pool32i");
14643 generate_exception_end(ctx
, EXCP_RI
);
14648 minor
= (ctx
->opcode
>> 12) & 0xf;
14649 offset
= sextract32(ctx
->opcode
, 0,
14650 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14653 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14654 mips32_op
= OPC_LWL
;
14657 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14658 mips32_op
= OPC_SWL
;
14661 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14662 mips32_op
= OPC_LWR
;
14665 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14666 mips32_op
= OPC_SWR
;
14668 #if defined(TARGET_MIPS64)
14670 check_insn(ctx
, ISA_MIPS3
);
14671 check_mips_64(ctx
);
14672 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14673 mips32_op
= OPC_LDL
;
14676 check_insn(ctx
, ISA_MIPS3
);
14677 check_mips_64(ctx
);
14678 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14679 mips32_op
= OPC_SDL
;
14682 check_insn(ctx
, ISA_MIPS3
);
14683 check_mips_64(ctx
);
14684 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14685 mips32_op
= OPC_LDR
;
14688 check_insn(ctx
, ISA_MIPS3
);
14689 check_mips_64(ctx
);
14690 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14691 mips32_op
= OPC_SDR
;
14694 check_insn(ctx
, ISA_MIPS3
);
14695 check_mips_64(ctx
);
14696 mips32_op
= OPC_LWU
;
14699 check_insn(ctx
, ISA_MIPS3
);
14700 check_mips_64(ctx
);
14701 mips32_op
= OPC_LLD
;
14705 mips32_op
= OPC_LL
;
14708 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14711 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14714 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14716 #if defined(TARGET_MIPS64)
14718 check_insn(ctx
, ISA_MIPS3
);
14719 check_mips_64(ctx
);
14720 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14724 /* Treat as no-op */
14725 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14726 /* hint codes 24-31 are reserved and signal RI */
14727 generate_exception(ctx
, EXCP_RI
);
14731 MIPS_INVAL("pool32c");
14732 generate_exception_end(ctx
, EXCP_RI
);
14736 case ADDI32
: /* AUI, LUI */
14737 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14739 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14742 mips32_op
= OPC_ADDI
;
14747 mips32_op
= OPC_ADDIU
;
14749 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14752 /* Logical operations */
14754 mips32_op
= OPC_ORI
;
14757 mips32_op
= OPC_XORI
;
14760 mips32_op
= OPC_ANDI
;
14762 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14765 /* Set less than immediate */
14767 mips32_op
= OPC_SLTI
;
14770 mips32_op
= OPC_SLTIU
;
14772 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14775 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14776 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14777 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14778 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14780 case JALS32
: /* BOVC, BEQC, BEQZALC */
14781 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14784 mips32_op
= OPC_BOVC
;
14785 } else if (rs
< rt
&& rs
== 0) {
14787 mips32_op
= OPC_BEQZALC
;
14790 mips32_op
= OPC_BEQC
;
14792 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14795 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14796 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14797 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14800 case BEQ32
: /* BC */
14801 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14803 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14804 sextract32(ctx
->opcode
<< 1, 0, 27));
14807 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14810 case BNE32
: /* BALC */
14811 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14813 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14814 sextract32(ctx
->opcode
<< 1, 0, 27));
14817 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14820 case J32
: /* BGTZC, BLTZC, BLTC */
14821 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14822 if (rs
== 0 && rt
!= 0) {
14824 mips32_op
= OPC_BGTZC
;
14825 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14827 mips32_op
= OPC_BLTZC
;
14830 mips32_op
= OPC_BLTC
;
14832 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14835 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14836 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14839 case JAL32
: /* BLEZC, BGEZC, BGEC */
14840 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14841 if (rs
== 0 && rt
!= 0) {
14843 mips32_op
= OPC_BLEZC
;
14844 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14846 mips32_op
= OPC_BGEZC
;
14849 mips32_op
= OPC_BGEC
;
14851 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14854 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14855 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14856 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14859 /* Floating point (COP1) */
14861 mips32_op
= OPC_LWC1
;
14864 mips32_op
= OPC_LDC1
;
14867 mips32_op
= OPC_SWC1
;
14870 mips32_op
= OPC_SDC1
;
14872 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14874 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14875 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14876 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14877 switch ((ctx
->opcode
>> 16) & 0x1f) {
14878 case ADDIUPC_00
... ADDIUPC_07
:
14879 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14882 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14885 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14887 case LWPC_08
... LWPC_0F
:
14888 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14891 generate_exception(ctx
, EXCP_RI
);
14896 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14897 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14899 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14902 case BNVC
: /* BNEC, BNEZALC */
14903 check_insn(ctx
, ISA_MIPS32R6
);
14906 mips32_op
= OPC_BNVC
;
14907 } else if (rs
< rt
&& rs
== 0) {
14909 mips32_op
= OPC_BNEZALC
;
14912 mips32_op
= OPC_BNEC
;
14914 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14916 case R6_BNEZC
: /* JIALC */
14917 check_insn(ctx
, ISA_MIPS32R6
);
14920 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14921 sextract32(ctx
->opcode
<< 1, 0, 22));
14924 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14927 case R6_BEQZC
: /* JIC */
14928 check_insn(ctx
, ISA_MIPS32R6
);
14931 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14932 sextract32(ctx
->opcode
<< 1, 0, 22));
14935 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14938 case BLEZALC
: /* BGEZALC, BGEUC */
14939 check_insn(ctx
, ISA_MIPS32R6
);
14940 if (rs
== 0 && rt
!= 0) {
14942 mips32_op
= OPC_BLEZALC
;
14943 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14945 mips32_op
= OPC_BGEZALC
;
14948 mips32_op
= OPC_BGEUC
;
14950 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14952 case BGTZALC
: /* BLTZALC, BLTUC */
14953 check_insn(ctx
, ISA_MIPS32R6
);
14954 if (rs
== 0 && rt
!= 0) {
14956 mips32_op
= OPC_BGTZALC
;
14957 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14959 mips32_op
= OPC_BLTZALC
;
14962 mips32_op
= OPC_BLTUC
;
14964 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14966 /* Loads and stores */
14968 mips32_op
= OPC_LB
;
14971 mips32_op
= OPC_LBU
;
14974 mips32_op
= OPC_LH
;
14977 mips32_op
= OPC_LHU
;
14980 mips32_op
= OPC_LW
;
14982 #ifdef TARGET_MIPS64
14984 check_insn(ctx
, ISA_MIPS3
);
14985 check_mips_64(ctx
);
14986 mips32_op
= OPC_LD
;
14989 check_insn(ctx
, ISA_MIPS3
);
14990 check_mips_64(ctx
);
14991 mips32_op
= OPC_SD
;
14995 mips32_op
= OPC_SB
;
14998 mips32_op
= OPC_SH
;
15001 mips32_op
= OPC_SW
;
15004 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
15007 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
15010 generate_exception_end(ctx
, EXCP_RI
);
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t op;

    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);
        return 2;
    }

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
15028 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
15029 switch (op
& 0x7) { /* MSB-3..MSB-5 */
15031 /* POOL32A, POOL32B, POOL32I, POOL32C */
15033 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
15035 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
15037 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
15039 /* LB32, LH32, LWC132, LDC132, LW32 */
15040 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
15041 generate_exception_end(ctx
, EXCP_RI
);
15046 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
15048 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
15050 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
15051 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
15052 generate_exception_end(ctx
, EXCP_RI
);
15062 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15063 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15064 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15067 switch (ctx
->opcode
& 0x1) {
15075 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
                /* In Release 6 the register number location in
                 * the instruction encoding has changed.
                 */
15079 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15081 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
15087 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15088 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15089 int amount
= (ctx
->opcode
>> 1) & 0x7;
15091 amount
= amount
== 0 ? 8 : amount
;
15093 switch (ctx
->opcode
& 0x1) {
15102 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15106 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15107 gen_pool16c_r6_insn(ctx
);
15109 gen_pool16c_insn(ctx
);
15114 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15115 int rb
= 28; /* GP */
15116 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15118 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15122 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15123 if (ctx
->opcode
& 1) {
15124 generate_exception_end(ctx
, EXCP_RI
);
15127 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15128 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15129 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15130 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15135 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15136 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15137 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15138 offset
= (offset
== 0xf ? -1 : offset
);
15140 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15145 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15146 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15147 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15149 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15154 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15155 int rb
= 29; /* SP */
15156 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15158 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15163 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15164 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15165 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15167 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15172 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15173 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15174 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15176 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15181 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15182 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15183 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15185 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15190 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15191 int rb
= 29; /* SP */
15192 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15194 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15199 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15200 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15201 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15203 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15208 int rd
= uMIPS_RD5(ctx
->opcode
);
15209 int rs
= uMIPS_RS5(ctx
->opcode
);
15211 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15218 switch (ctx
->opcode
& 0x1) {
15228 switch (ctx
->opcode
& 0x1) {
15233 gen_addiur1sp(ctx
);
15237 case B16
: /* BC16 */
15238 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15239 sextract32(ctx
->opcode
, 0, 10) << 1,
15240 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15242 case BNEZ16
: /* BNEZC16 */
15243 case BEQZ16
: /* BEQZC16 */
15244 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15245 mmreg(uMIPS_RD(ctx
->opcode
)),
15246 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15247 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15252 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15253 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15255 imm
= (imm
== 0x7f ? -1 : imm
);
15256 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15262 generate_exception_end(ctx
, EXCP_RI
);
15265 decode_micromips32_opc(env
, ctx
);
15272 /* SmartMIPS extension to MIPS32 */
15274 #if defined(TARGET_MIPS64)
15276 /* MDMX extension to MIPS64 */
15280 /* MIPSDSP functions. */
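/*
 * Indexed DSP load: the effective address is the sum of two GPRs
 * (either operand is treated as zero when its register number is 0),
 * and the loaded byte/halfword/word (or doubleword on MIPS64) is
 * written back to rd.
 */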
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
{
    TCGv t0;

    check_dsp(ctx);
    t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
    }
15299 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
15300 gen_store_gpr(t0
, rd
);
15303 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
15304 gen_store_gpr(t0
, rd
);
15307 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
15308 gen_store_gpr(t0
, rd
);
15310 #if defined(TARGET_MIPS64)
15312 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
15313 gen_store_gpr(t0
, rd
);
15320 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15321 int ret
, int v1
, int v2
)
15327 /* Treat as NOP. */
15331 v1_t
= tcg_temp_new();
15332 v2_t
= tcg_temp_new();
15334 gen_load_gpr(v1_t
, v1
);
15335 gen_load_gpr(v2_t
, v2
);
    /* OPC_MULT_G_2E is equal to OPC_ADDUH_QB_DSP */
15339 case OPC_MULT_G_2E
:
15343 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15345 case OPC_ADDUH_R_QB
:
15346 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15349 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15351 case OPC_ADDQH_R_PH
:
15352 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15355 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15357 case OPC_ADDQH_R_W
:
15358 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15361 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15363 case OPC_SUBUH_R_QB
:
15364 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15367 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15369 case OPC_SUBQH_R_PH
:
15370 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15373 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15375 case OPC_SUBQH_R_W
:
15376 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15380 case OPC_ABSQ_S_PH_DSP
:
15382 case OPC_ABSQ_S_QB
:
15384 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15386 case OPC_ABSQ_S_PH
:
15388 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15392 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15394 case OPC_PRECEQ_W_PHL
:
15396 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15397 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15399 case OPC_PRECEQ_W_PHR
:
15401 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15402 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15403 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15405 case OPC_PRECEQU_PH_QBL
:
15407 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15409 case OPC_PRECEQU_PH_QBR
:
15411 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15413 case OPC_PRECEQU_PH_QBLA
:
15415 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15417 case OPC_PRECEQU_PH_QBRA
:
15419 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15421 case OPC_PRECEU_PH_QBL
:
15423 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15425 case OPC_PRECEU_PH_QBR
:
15427 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15429 case OPC_PRECEU_PH_QBLA
:
15431 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15433 case OPC_PRECEU_PH_QBRA
:
15435 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15439 case OPC_ADDU_QB_DSP
:
15443 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15445 case OPC_ADDQ_S_PH
:
15447 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15451 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15455 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15457 case OPC_ADDU_S_QB
:
15459 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15463 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15465 case OPC_ADDU_S_PH
:
15467 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15471 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15473 case OPC_SUBQ_S_PH
:
15475 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15479 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15483 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15485 case OPC_SUBU_S_QB
:
15487 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15491 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15493 case OPC_SUBU_S_PH
:
15495 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15499 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15503 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15507 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15509 case OPC_RADDU_W_QB
:
15511 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15515 case OPC_CMPU_EQ_QB_DSP
:
15517 case OPC_PRECR_QB_PH
:
15519 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15521 case OPC_PRECRQ_QB_PH
:
15523 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15525 case OPC_PRECR_SRA_PH_W
:
15528 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15529 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15531 tcg_temp_free_i32(sa_t
);
15534 case OPC_PRECR_SRA_R_PH_W
:
15537 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15538 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15540 tcg_temp_free_i32(sa_t
);
15543 case OPC_PRECRQ_PH_W
:
15545 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15547 case OPC_PRECRQ_RS_PH_W
:
15549 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15551 case OPC_PRECRQU_S_QB_PH
:
15553 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15557 #ifdef TARGET_MIPS64
15558 case OPC_ABSQ_S_QH_DSP
:
15560 case OPC_PRECEQ_L_PWL
:
15562 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15564 case OPC_PRECEQ_L_PWR
:
15566 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15568 case OPC_PRECEQ_PW_QHL
:
15570 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15572 case OPC_PRECEQ_PW_QHR
:
15574 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15576 case OPC_PRECEQ_PW_QHLA
:
15578 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15580 case OPC_PRECEQ_PW_QHRA
:
15582 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15584 case OPC_PRECEQU_QH_OBL
:
15586 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15588 case OPC_PRECEQU_QH_OBR
:
15590 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15592 case OPC_PRECEQU_QH_OBLA
:
15594 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15596 case OPC_PRECEQU_QH_OBRA
:
15598 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15600 case OPC_PRECEU_QH_OBL
:
15602 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15604 case OPC_PRECEU_QH_OBR
:
15606 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15608 case OPC_PRECEU_QH_OBLA
:
15610 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15612 case OPC_PRECEU_QH_OBRA
:
15614 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15616 case OPC_ABSQ_S_OB
:
15618 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15620 case OPC_ABSQ_S_PW
:
15622 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15624 case OPC_ABSQ_S_QH
:
15626 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15630 case OPC_ADDU_OB_DSP
:
15632 case OPC_RADDU_L_OB
:
15634 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15638 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15640 case OPC_SUBQ_S_PW
:
15642 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15646 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15648 case OPC_SUBQ_S_QH
:
15650 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15654 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15656 case OPC_SUBU_S_OB
:
15658 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15662 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15664 case OPC_SUBU_S_QH
:
15666 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15670 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15672 case OPC_SUBUH_R_OB
:
15674 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15678 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15680 case OPC_ADDQ_S_PW
:
15682 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15686 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15688 case OPC_ADDQ_S_QH
:
15690 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15694 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15696 case OPC_ADDU_S_OB
:
15698 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15702 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15704 case OPC_ADDU_S_QH
:
15706 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15710 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15712 case OPC_ADDUH_R_OB
:
15714 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15718 case OPC_CMPU_EQ_OB_DSP
:
15720 case OPC_PRECR_OB_QH
:
15722 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15724 case OPC_PRECR_SRA_QH_PW
:
15727 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15728 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15729 tcg_temp_free_i32(ret_t
);
15732 case OPC_PRECR_SRA_R_QH_PW
:
15735 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15736 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15737 tcg_temp_free_i32(sa_v
);
15740 case OPC_PRECRQ_OB_QH
:
15742 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15744 case OPC_PRECRQ_PW_L
:
15746 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15748 case OPC_PRECRQ_QH_PW
:
15750 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15752 case OPC_PRECRQ_RS_QH_PW
:
15754 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15756 case OPC_PRECRQU_S_OB_QH
:
15758 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15765 tcg_temp_free(v1_t
);
15766 tcg_temp_free(v2_t
);
15769 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
15770 int ret
, int v1
, int v2
)
15778 /* Treat as NOP. */
15782 t0
= tcg_temp_new();
15783 v1_t
= tcg_temp_new();
15784 v2_t
= tcg_temp_new();
15786 tcg_gen_movi_tl(t0
, v1
);
15787 gen_load_gpr(v1_t
, v1
);
15788 gen_load_gpr(v2_t
, v2
);
15791 case OPC_SHLL_QB_DSP
:
15793 op2
= MASK_SHLL_QB(ctx
->opcode
);
15797 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15801 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15805 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15809 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15811 case OPC_SHLL_S_PH
:
15813 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15815 case OPC_SHLLV_S_PH
:
15817 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15821 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15823 case OPC_SHLLV_S_W
:
15825 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15829 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15833 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15837 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15841 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15845 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15847 case OPC_SHRA_R_QB
:
15849 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15853 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15855 case OPC_SHRAV_R_QB
:
15857 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15861 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15863 case OPC_SHRA_R_PH
:
15865 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15869 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15871 case OPC_SHRAV_R_PH
:
15873 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15877 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15879 case OPC_SHRAV_R_W
:
15881 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15883 default: /* Invalid */
15884 MIPS_INVAL("MASK SHLL.QB");
15885 generate_exception_end(ctx
, EXCP_RI
);
15890 #ifdef TARGET_MIPS64
15891 case OPC_SHLL_OB_DSP
:
15892 op2
= MASK_SHLL_OB(ctx
->opcode
);
15896 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15900 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15902 case OPC_SHLL_S_PW
:
15904 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15906 case OPC_SHLLV_S_PW
:
15908 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15912 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15916 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15920 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15924 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15926 case OPC_SHLL_S_QH
:
15928 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15930 case OPC_SHLLV_S_QH
:
15932 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15936 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15940 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15942 case OPC_SHRA_R_OB
:
15944 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15946 case OPC_SHRAV_R_OB
:
15948 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15952 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15956 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15958 case OPC_SHRA_R_PW
:
15960 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15962 case OPC_SHRAV_R_PW
:
15964 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15968 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15972 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15974 case OPC_SHRA_R_QH
:
15976 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15978 case OPC_SHRAV_R_QH
:
15980 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15984 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15988 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15992 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15996 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15998 default: /* Invalid */
15999 MIPS_INVAL("MASK SHLL.OB");
16000 generate_exception_end(ctx
, EXCP_RI
);
16008 tcg_temp_free(v1_t
);
16009 tcg_temp_free(v2_t
);
16012 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16013 int ret
, int v1
, int v2
, int check_ret
)
16019 if ((ret
== 0) && (check_ret
== 1)) {
16020 /* Treat as NOP. */
16024 t0
= tcg_temp_new_i32();
16025 v1_t
= tcg_temp_new();
16026 v2_t
= tcg_temp_new();
16028 tcg_gen_movi_i32(t0
, ret
);
16029 gen_load_gpr(v1_t
, v1
);
16030 gen_load_gpr(v2_t
, v2
);
16033 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
16034 * the same mask and op1. */
16035 case OPC_MULT_G_2E
:
16039 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16042 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16045 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16047 case OPC_MULQ_RS_W
:
16048 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16052 case OPC_DPA_W_PH_DSP
:
16054 case OPC_DPAU_H_QBL
:
16056 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16058 case OPC_DPAU_H_QBR
:
16060 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16062 case OPC_DPSU_H_QBL
:
16064 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16066 case OPC_DPSU_H_QBR
:
16068 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16072 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16074 case OPC_DPAX_W_PH
:
16076 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16078 case OPC_DPAQ_S_W_PH
:
16080 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16082 case OPC_DPAQX_S_W_PH
:
16084 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16086 case OPC_DPAQX_SA_W_PH
:
16088 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16092 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16094 case OPC_DPSX_W_PH
:
16096 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16098 case OPC_DPSQ_S_W_PH
:
16100 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16102 case OPC_DPSQX_S_W_PH
:
16104 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16106 case OPC_DPSQX_SA_W_PH
:
16108 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16110 case OPC_MULSAQ_S_W_PH
:
16112 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16114 case OPC_DPAQ_SA_L_W
:
16116 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16118 case OPC_DPSQ_SA_L_W
:
16120 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16122 case OPC_MAQ_S_W_PHL
:
16124 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16126 case OPC_MAQ_S_W_PHR
:
16128 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16130 case OPC_MAQ_SA_W_PHL
:
16132 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16134 case OPC_MAQ_SA_W_PHR
:
16136 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16138 case OPC_MULSA_W_PH
:
16140 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16144 #ifdef TARGET_MIPS64
16145 case OPC_DPAQ_W_QH_DSP
:
16147 int ac
= ret
& 0x03;
16148 tcg_gen_movi_i32(t0
, ac
);
16153 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16157 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16161 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16165 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16169 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16171 case OPC_DPAQ_S_W_QH
:
16173 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16175 case OPC_DPAQ_SA_L_PW
:
16177 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16179 case OPC_DPAU_H_OBL
:
16181 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16183 case OPC_DPAU_H_OBR
:
16185 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16189 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16191 case OPC_DPSQ_S_W_QH
:
16193 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16195 case OPC_DPSQ_SA_L_PW
:
16197 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16199 case OPC_DPSU_H_OBL
:
16201 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16203 case OPC_DPSU_H_OBR
:
16205 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16207 case OPC_MAQ_S_L_PWL
:
16209 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16211 case OPC_MAQ_S_L_PWR
:
16213 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16215 case OPC_MAQ_S_W_QHLL
:
16217 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16219 case OPC_MAQ_SA_W_QHLL
:
16221 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16223 case OPC_MAQ_S_W_QHLR
:
16225 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16227 case OPC_MAQ_SA_W_QHLR
:
16229 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16231 case OPC_MAQ_S_W_QHRL
:
16233 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16235 case OPC_MAQ_SA_W_QHRL
:
16237 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16239 case OPC_MAQ_S_W_QHRR
:
16241 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16243 case OPC_MAQ_SA_W_QHRR
:
16245 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16247 case OPC_MULSAQ_S_L_PW
:
16249 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16251 case OPC_MULSAQ_S_W_QH
:
16253 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16259 case OPC_ADDU_QB_DSP
:
16261 case OPC_MULEU_S_PH_QBL
:
16263 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16265 case OPC_MULEU_S_PH_QBR
:
16267 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16269 case OPC_MULQ_RS_PH
:
16271 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16273 case OPC_MULEQ_S_W_PHL
:
16275 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16277 case OPC_MULEQ_S_W_PHR
:
16279 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16281 case OPC_MULQ_S_PH
:
16283 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16287 #ifdef TARGET_MIPS64
16288 case OPC_ADDU_OB_DSP
:
16290 case OPC_MULEQ_S_PW_QHL
:
16292 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16294 case OPC_MULEQ_S_PW_QHR
:
16296 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16298 case OPC_MULEU_S_QH_OBL
:
16300 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16302 case OPC_MULEU_S_QH_OBR
:
16304 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16306 case OPC_MULQ_RS_QH
:
16308 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16315 tcg_temp_free_i32(t0
);
16316 tcg_temp_free(v1_t
);
16317 tcg_temp_free(v2_t
);
16320 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16328 /* Treat as NOP. */
16332 t0
= tcg_temp_new();
16333 val_t
= tcg_temp_new();
16334 gen_load_gpr(val_t
, val
);
16337 case OPC_ABSQ_S_PH_DSP
:
16341 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16346 target_long result
;
16347 imm
= (ctx
->opcode
>> 16) & 0xFF;
16348 result
= (uint32_t)imm
<< 24 |
16349 (uint32_t)imm
<< 16 |
16350 (uint32_t)imm
<< 8 |
16352 result
= (int32_t)result
;
16353 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
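            /*
             * Example: an 8-bit immediate of 0xA5 yields 0xA5A5A5A5 here,
             * sign-extended through the (int32_t) cast before being moved
             * into the destination register.
             */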
16358 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16359 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16360 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16361 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16362 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16363 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16368 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16369 imm
= (int16_t)(imm
<< 6) >> 6;
16370 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16371 (target_long
)((int32_t)imm
<< 16 | \
16377 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16378 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16379 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16380 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16384 #ifdef TARGET_MIPS64
16385 case OPC_ABSQ_S_QH_DSP
:
16392 imm
= (ctx
->opcode
>> 16) & 0xFF;
16393 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16394 temp
= (temp
<< 16) | temp
;
16395 temp
= (temp
<< 32) | temp
;
16396 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16404 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16405 imm
= (int16_t)(imm
<< 6) >> 6;
16406 temp
= ((target_long
)imm
<< 32) \
16407 | ((target_long
)imm
& 0xFFFFFFFF);
16408 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16416 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16417 imm
= (int16_t)(imm
<< 6) >> 6;
16419 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16420 ((uint64_t)(uint16_t)imm
<< 32) |
16421 ((uint64_t)(uint16_t)imm
<< 16) |
16422 (uint64_t)(uint16_t)imm
;
16423 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16428 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16429 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16430 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16431 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16432 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16433 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16434 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16438 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16439 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16440 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16444 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16445 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16446 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16447 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16448 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16455 tcg_temp_free(val_t
);
16458 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16459 uint32_t op1
, uint32_t op2
,
16460 int ret
, int v1
, int v2
, int check_ret
)
16466 if ((ret
== 0) && (check_ret
== 1)) {
16467 /* Treat as NOP. */
16471 t1
= tcg_temp_new();
16472 v1_t
= tcg_temp_new();
16473 v2_t
= tcg_temp_new();
16475 gen_load_gpr(v1_t
, v1
);
16476 gen_load_gpr(v2_t
, v2
);
16479 case OPC_CMPU_EQ_QB_DSP
:
16481 case OPC_CMPU_EQ_QB
:
16483 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16485 case OPC_CMPU_LT_QB
:
16487 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16489 case OPC_CMPU_LE_QB
:
16491 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16493 case OPC_CMPGU_EQ_QB
:
16495 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16497 case OPC_CMPGU_LT_QB
:
16499 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16501 case OPC_CMPGU_LE_QB
:
16503 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16505 case OPC_CMPGDU_EQ_QB
:
16507 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16508 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16509 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16510 tcg_gen_shli_tl(t1
, t1
, 24);
16511 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16513 case OPC_CMPGDU_LT_QB
:
16515 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16516 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16517 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16518 tcg_gen_shli_tl(t1
, t1
, 24);
16519 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16521 case OPC_CMPGDU_LE_QB
:
16523 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16524 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16525 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16526 tcg_gen_shli_tl(t1
, t1
, 24);
16527 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16529 case OPC_CMP_EQ_PH
:
16531 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16533 case OPC_CMP_LT_PH
:
16535 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16537 case OPC_CMP_LE_PH
:
16539 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16543 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16547 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16549 case OPC_PACKRL_PH
:
16551 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16555 #ifdef TARGET_MIPS64
16556 case OPC_CMPU_EQ_OB_DSP
:
16558 case OPC_CMP_EQ_PW
:
16560 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16562 case OPC_CMP_LT_PW
:
16564 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16566 case OPC_CMP_LE_PW
:
16568 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16570 case OPC_CMP_EQ_QH
:
16572 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16574 case OPC_CMP_LT_QH
:
16576 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16578 case OPC_CMP_LE_QH
:
16580 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16582 case OPC_CMPGDU_EQ_OB
:
16584 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16586 case OPC_CMPGDU_LT_OB
:
16588 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16590 case OPC_CMPGDU_LE_OB
:
16592 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16594 case OPC_CMPGU_EQ_OB
:
16596 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16598 case OPC_CMPGU_LT_OB
:
16600 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16602 case OPC_CMPGU_LE_OB
:
16604 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16606 case OPC_CMPU_EQ_OB
:
16608 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16610 case OPC_CMPU_LT_OB
:
16612 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16614 case OPC_CMPU_LE_OB
:
16616 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16618 case OPC_PACKRL_PW
:
16620 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16624 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16628 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16632 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16640 tcg_temp_free(v1_t
);
16641 tcg_temp_free(v2_t
);
16644 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16645 uint32_t op1
, int rt
, int rs
, int sa
)
16652 /* Treat as NOP. */
16656 t0
= tcg_temp_new();
16657 gen_load_gpr(t0
, rs
);
16660 case OPC_APPEND_DSP
:
16661 switch (MASK_APPEND(ctx
->opcode
)) {
16664 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16666 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16670 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16671 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16672 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16673 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16675 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16679 if (sa
!= 0 && sa
!= 2) {
16680 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16681 tcg_gen_ext32u_tl(t0
, t0
);
16682 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16683 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16685 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16687 default: /* Invalid */
16688 MIPS_INVAL("MASK APPEND");
16689 generate_exception_end(ctx
, EXCP_RI
);
16693 #ifdef TARGET_MIPS64
16694 case OPC_DAPPEND_DSP
:
16695 switch (MASK_DAPPEND(ctx
->opcode
)) {
16698 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16702 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16703 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16704 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16708 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16709 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16710 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16715 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16716 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16717 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16718 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16721 default: /* Invalid */
16722 MIPS_INVAL("MASK DAPPEND");
16723 generate_exception_end(ctx
, EXCP_RI
);
16732 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16733 int ret
, int v1
, int v2
, int check_ret
)
16742 if ((ret
== 0) && (check_ret
== 1)) {
16743 /* Treat as NOP. */
16747 t0
= tcg_temp_new();
16748 t1
= tcg_temp_new();
16749 v1_t
= tcg_temp_new();
16750 v2_t
= tcg_temp_new();
16752 gen_load_gpr(v1_t
, v1
);
16753 gen_load_gpr(v2_t
, v2
);
16756 case OPC_EXTR_W_DSP
:
16760 tcg_gen_movi_tl(t0
, v2
);
16761 tcg_gen_movi_tl(t1
, v1
);
16762 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16765 tcg_gen_movi_tl(t0
, v2
);
16766 tcg_gen_movi_tl(t1
, v1
);
16767 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16769 case OPC_EXTR_RS_W
:
16770 tcg_gen_movi_tl(t0
, v2
);
16771 tcg_gen_movi_tl(t1
, v1
);
16772 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16775 tcg_gen_movi_tl(t0
, v2
);
16776 tcg_gen_movi_tl(t1
, v1
);
16777 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16779 case OPC_EXTRV_S_H
:
16780 tcg_gen_movi_tl(t0
, v2
);
16781 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16784 tcg_gen_movi_tl(t0
, v2
);
16785 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16787 case OPC_EXTRV_R_W
:
16788 tcg_gen_movi_tl(t0
, v2
);
16789 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16791 case OPC_EXTRV_RS_W
:
16792 tcg_gen_movi_tl(t0
, v2
);
16793 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16796 tcg_gen_movi_tl(t0
, v2
);
16797 tcg_gen_movi_tl(t1
, v1
);
16798 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16801 tcg_gen_movi_tl(t0
, v2
);
16802 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16805 tcg_gen_movi_tl(t0
, v2
);
16806 tcg_gen_movi_tl(t1
, v1
);
16807 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16810 tcg_gen_movi_tl(t0
, v2
);
16811 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16814 imm
= (ctx
->opcode
>> 20) & 0x3F;
16815 tcg_gen_movi_tl(t0
, ret
);
16816 tcg_gen_movi_tl(t1
, imm
);
16817 gen_helper_shilo(t0
, t1
, cpu_env
);
16820 tcg_gen_movi_tl(t0
, ret
);
16821 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16824 tcg_gen_movi_tl(t0
, ret
);
16825 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16828 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16829 tcg_gen_movi_tl(t0
, imm
);
16830 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16833 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16834 tcg_gen_movi_tl(t0
, imm
);
16835 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16839 #ifdef TARGET_MIPS64
16840 case OPC_DEXTR_W_DSP
:
16844 tcg_gen_movi_tl(t0
, ret
);
16845 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16849 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16850 int ac
= (ctx
->opcode
>> 11) & 0x03;
16851 tcg_gen_movi_tl(t0
, shift
);
16852 tcg_gen_movi_tl(t1
, ac
);
16853 gen_helper_dshilo(t0
, t1
, cpu_env
);
16858 int ac
= (ctx
->opcode
>> 11) & 0x03;
16859 tcg_gen_movi_tl(t0
, ac
);
16860 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16864 tcg_gen_movi_tl(t0
, v2
);
16865 tcg_gen_movi_tl(t1
, v1
);
16867 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16870 tcg_gen_movi_tl(t0
, v2
);
16871 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16874 tcg_gen_movi_tl(t0
, v2
);
16875 tcg_gen_movi_tl(t1
, v1
);
16876 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16879 tcg_gen_movi_tl(t0
, v2
);
16880 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16883 tcg_gen_movi_tl(t0
, v2
);
16884 tcg_gen_movi_tl(t1
, v1
);
16885 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16887 case OPC_DEXTR_R_L
:
16888 tcg_gen_movi_tl(t0
, v2
);
16889 tcg_gen_movi_tl(t1
, v1
);
16890 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16892 case OPC_DEXTR_RS_L
:
16893 tcg_gen_movi_tl(t0
, v2
);
16894 tcg_gen_movi_tl(t1
, v1
);
16895 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16898 tcg_gen_movi_tl(t0
, v2
);
16899 tcg_gen_movi_tl(t1
, v1
);
16900 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16902 case OPC_DEXTR_R_W
:
16903 tcg_gen_movi_tl(t0
, v2
);
16904 tcg_gen_movi_tl(t1
, v1
);
16905 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16907 case OPC_DEXTR_RS_W
:
16908 tcg_gen_movi_tl(t0
, v2
);
16909 tcg_gen_movi_tl(t1
, v1
);
16910 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16912 case OPC_DEXTR_S_H
:
16913 tcg_gen_movi_tl(t0
, v2
);
16914 tcg_gen_movi_tl(t1
, v1
);
16915 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16917 case OPC_DEXTRV_S_H
:
16918 tcg_gen_movi_tl(t0
, v2
);
16919 tcg_gen_movi_tl(t1
, v1
);
16920 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16923 tcg_gen_movi_tl(t0
, v2
);
16924 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16926 case OPC_DEXTRV_R_L
:
16927 tcg_gen_movi_tl(t0
, v2
);
16928 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16930 case OPC_DEXTRV_RS_L
:
16931 tcg_gen_movi_tl(t0
, v2
);
16932 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16935 tcg_gen_movi_tl(t0
, v2
);
16936 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16938 case OPC_DEXTRV_R_W
:
16939 tcg_gen_movi_tl(t0
, v2
);
16940 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16942 case OPC_DEXTRV_RS_W
:
16943 tcg_gen_movi_tl(t0
, v2
);
16944 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16953 tcg_temp_free(v1_t
);
16954 tcg_temp_free(v2_t
);
16957 /* End MIPSDSP functions. */
16959 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
16961 int rs
, rt
, rd
, sa
;
16964 rs
= (ctx
->opcode
>> 21) & 0x1f;
16965 rt
= (ctx
->opcode
>> 16) & 0x1f;
16966 rd
= (ctx
->opcode
>> 11) & 0x1f;
16967 sa
= (ctx
->opcode
>> 6) & 0x1f;
16969 op1
= MASK_SPECIAL(ctx
->opcode
);
16972 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16974 case OPC_MULT
... OPC_DIVU
:
16975 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16985 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16988 MIPS_INVAL("special_r6 muldiv");
16989 generate_exception_end(ctx
, EXCP_RI
);
16995 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16999 if (rt
== 0 && sa
== 1) {
17000 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17001 We need additionally to check other fields */
17002 gen_cl(ctx
, op1
, rd
, rs
);
17004 generate_exception_end(ctx
, EXCP_RI
);
17008 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17009 gen_helper_do_semihosting(cpu_env
);
17011 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
17012 generate_exception_end(ctx
, EXCP_RI
);
17014 generate_exception_end(ctx
, EXCP_DBp
);
17018 #if defined(TARGET_MIPS64)
17020 check_mips_64(ctx
);
17021 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17025 if (rt
== 0 && sa
== 1) {
17026 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17027 We need additionally to check other fields */
17028 check_mips_64(ctx
);
17029 gen_cl(ctx
, op1
, rd
, rs
);
17031 generate_exception_end(ctx
, EXCP_RI
);
17034 case OPC_DMULT
... OPC_DDIVU
:
17035 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17045 check_mips_64(ctx
);
17046 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17049 MIPS_INVAL("special_r6 muldiv");
17050 generate_exception_end(ctx
, EXCP_RI
);
17055 default: /* Invalid */
17056 MIPS_INVAL("special_r6");
17057 generate_exception_end(ctx
, EXCP_RI
);
17062 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17064 int rs
, rt
, rd
, sa
;
17067 rs
= (ctx
->opcode
>> 21) & 0x1f;
17068 rt
= (ctx
->opcode
>> 16) & 0x1f;
17069 rd
= (ctx
->opcode
>> 11) & 0x1f;
17070 sa
= (ctx
->opcode
>> 6) & 0x1f;
17072 op1
= MASK_SPECIAL(ctx
->opcode
);
17074 case OPC_MOVN
: /* Conditional move */
17076 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
17077 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
17078 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17080 case OPC_MFHI
: /* Move from HI/LO */
17082 gen_HILO(ctx
, op1
, rs
& 3, rd
);
17085 case OPC_MTLO
: /* Move to HI/LO */
17086 gen_HILO(ctx
, op1
, rd
& 3, rs
);
17089 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
17090 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
17091 check_cp1_enabled(ctx
);
17092 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
17093 (ctx
->opcode
>> 16) & 1);
17095 generate_exception_err(ctx
, EXCP_CpU
, 1);
17101 check_insn(ctx
, INSN_VR54XX
);
17102 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
17103 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
17105 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17110 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17112 #if defined(TARGET_MIPS64)
17113 case OPC_DMULT
... OPC_DDIVU
:
17114 check_insn(ctx
, ISA_MIPS3
);
17115 check_mips_64(ctx
);
17116 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17120 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17123 #ifdef MIPS_STRICT_STANDARD
17124 MIPS_INVAL("SPIM");
17125 generate_exception_end(ctx
, EXCP_RI
);
17127 /* Implemented as RI exception for now. */
17128 MIPS_INVAL("spim (unofficial)");
17129 generate_exception_end(ctx
, EXCP_RI
);
17132 default: /* Invalid */
17133 MIPS_INVAL("special_legacy");
17134 generate_exception_end(ctx
, EXCP_RI
);
17139 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
17141 int rs
, rt
, rd
, sa
;
17144 rs
= (ctx
->opcode
>> 21) & 0x1f;
17145 rt
= (ctx
->opcode
>> 16) & 0x1f;
17146 rd
= (ctx
->opcode
>> 11) & 0x1f;
17147 sa
= (ctx
->opcode
>> 6) & 0x1f;
17149 op1
= MASK_SPECIAL(ctx
->opcode
);
17151 case OPC_SLL
: /* Shift with immediate */
17152 if (sa
== 5 && rd
== 0 &&
17153 rs
== 0 && rt
== 0) { /* PAUSE */
17154 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
17155 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
17156 generate_exception_end(ctx
, EXCP_RI
);
17162 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17165 switch ((ctx
->opcode
>> 21) & 0x1f) {
17167 /* rotr is decoded as srl on non-R2 CPUs */
17168 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17173 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17176 generate_exception_end(ctx
, EXCP_RI
);
17180 case OPC_ADD
... OPC_SUBU
:
17181 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17183 case OPC_SLLV
: /* Shifts */
17185 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17188 switch ((ctx
->opcode
>> 6) & 0x1f) {
17190 /* rotrv is decoded as srlv on non-R2 CPUs */
17191 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17196 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17199 generate_exception_end(ctx
, EXCP_RI
);
17203 case OPC_SLT
: /* Set on less than */
17205 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17207 case OPC_AND
: /* Logic*/
17211 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17214 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17216 case OPC_TGE
... OPC_TEQ
: /* Traps */
17218 check_insn(ctx
, ISA_MIPS2
);
17219 gen_trap(ctx
, op1
, rs
, rt
, -1);
17221 case OPC_LSA
: /* OPC_PMON */
17222 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17223 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17224 decode_opc_special_r6(env
, ctx
);
17226 /* Pmon entry point, also R4010 selsl */
17227 #ifdef MIPS_STRICT_STANDARD
17228 MIPS_INVAL("PMON / selsl");
17229 generate_exception_end(ctx
, EXCP_RI
);
17231 gen_helper_0e0i(pmon
, sa
);
17236 generate_exception_end(ctx
, EXCP_SYSCALL
);
17239 generate_exception_end(ctx
, EXCP_BREAK
);
17242 check_insn(ctx
, ISA_MIPS2
);
17243 gen_sync(extract32(ctx
->opcode
, 6, 5));
17246 #if defined(TARGET_MIPS64)
17247 /* MIPS64 specific opcodes */
17252 check_insn(ctx
, ISA_MIPS3
);
17253 check_mips_64(ctx
);
17254 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17257 switch ((ctx
->opcode
>> 21) & 0x1f) {
17259 /* drotr is decoded as dsrl on non-R2 CPUs */
17260 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17265 check_insn(ctx
, ISA_MIPS3
);
17266 check_mips_64(ctx
);
17267 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17270 generate_exception_end(ctx
, EXCP_RI
);
17275 switch ((ctx
->opcode
>> 21) & 0x1f) {
17277 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17278 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17283 check_insn(ctx
, ISA_MIPS3
);
17284 check_mips_64(ctx
);
17285 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17288 generate_exception_end(ctx
, EXCP_RI
);
17292 case OPC_DADD
... OPC_DSUBU
:
17293 check_insn(ctx
, ISA_MIPS3
);
17294 check_mips_64(ctx
);
17295 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17299 check_insn(ctx
, ISA_MIPS3
);
17300 check_mips_64(ctx
);
17301 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17304 switch ((ctx
->opcode
>> 6) & 0x1f) {
17306 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17307 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17312 check_insn(ctx
, ISA_MIPS3
);
17313 check_mips_64(ctx
);
17314 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17317 generate_exception_end(ctx
, EXCP_RI
);
17322 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17323 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17324 decode_opc_special_r6(env
, ctx
);
17329 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17330 decode_opc_special_r6(env
, ctx
);
17332 decode_opc_special_legacy(env
, ctx
);
17337 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17342 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17344 rs
= (ctx
->opcode
>> 21) & 0x1f;
17345 rt
= (ctx
->opcode
>> 16) & 0x1f;
17346 rd
= (ctx
->opcode
>> 11) & 0x1f;
17348 op1
= MASK_SPECIAL2(ctx
->opcode
);
17350 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17351 case OPC_MSUB
... OPC_MSUBU
:
17352 check_insn(ctx
, ISA_MIPS32
);
17353 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17356 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17359 case OPC_DIVU_G_2F
:
17360 case OPC_MULT_G_2F
:
17361 case OPC_MULTU_G_2F
:
17363 case OPC_MODU_G_2F
:
17364 check_insn(ctx
, INSN_LOONGSON2F
);
17365 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17369 check_insn(ctx
, ISA_MIPS32
);
17370 gen_cl(ctx
, op1
, rd
, rs
);
17373 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17374 gen_helper_do_semihosting(cpu_env
);
17376 /* XXX: not clear which exception should be raised
17377 * when in debug mode...
17379 check_insn(ctx
, ISA_MIPS32
);
17380 generate_exception_end(ctx
, EXCP_DBp
);
17383 #if defined(TARGET_MIPS64)
17386 check_insn(ctx
, ISA_MIPS64
);
17387 check_mips_64(ctx
);
17388 gen_cl(ctx
, op1
, rd
, rs
);
17390 case OPC_DMULT_G_2F
:
17391 case OPC_DMULTU_G_2F
:
17392 case OPC_DDIV_G_2F
:
17393 case OPC_DDIVU_G_2F
:
17394 case OPC_DMOD_G_2F
:
17395 case OPC_DMODU_G_2F
:
17396 check_insn(ctx
, INSN_LOONGSON2F
);
17397 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17400 default: /* Invalid */
17401 MIPS_INVAL("special2_legacy");
17402 generate_exception_end(ctx
, EXCP_RI
);
17407 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17409 int rs
, rt
, rd
, sa
;
17413 rs
= (ctx
->opcode
>> 21) & 0x1f;
17414 rt
= (ctx
->opcode
>> 16) & 0x1f;
17415 rd
= (ctx
->opcode
>> 11) & 0x1f;
17416 sa
= (ctx
->opcode
>> 6) & 0x1f;
17417 imm
= (int16_t)ctx
->opcode
>> 7;
17419 op1
= MASK_SPECIAL3(ctx
->opcode
);
17423 /* hint codes 24-31 are reserved and signal RI */
17424 generate_exception_end(ctx
, EXCP_RI
);
17426 /* Treat as NOP. */
17429 check_cp0_enabled(ctx
);
17430 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17431 gen_cache_operation(ctx
, rt
, rs
, imm
);
17435 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17438 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17443 /* Treat as NOP. */
17446 op2
= MASK_BSHFL(ctx
->opcode
);
17448 case OPC_ALIGN
... OPC_ALIGN_END
:
17449 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17452 gen_bitswap(ctx
, op2
, rd
, rt
);
17457 #if defined(TARGET_MIPS64)
17459 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17462 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17465 check_mips_64(ctx
);
17468 /* Treat as NOP. */
17471 op2
= MASK_DBSHFL(ctx
->opcode
);
17473 case OPC_DALIGN
... OPC_DALIGN_END
:
17474 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17477 gen_bitswap(ctx
, op2
, rd
, rt
);
17484 default: /* Invalid */
17485 MIPS_INVAL("special3_r6");
17486 generate_exception_end(ctx
, EXCP_RI
);
17491 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17496 rs
= (ctx
->opcode
>> 21) & 0x1f;
17497 rt
= (ctx
->opcode
>> 16) & 0x1f;
17498 rd
= (ctx
->opcode
>> 11) & 0x1f;
17500 op1
= MASK_SPECIAL3(ctx
->opcode
);
17502 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17503 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17504 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17505 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17506 * the same mask and op1. */
17507 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17508 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17511 case OPC_ADDUH_R_QB
:
17513 case OPC_ADDQH_R_PH
:
17515 case OPC_ADDQH_R_W
:
17517 case OPC_SUBUH_R_QB
:
17519 case OPC_SUBQH_R_PH
:
17521 case OPC_SUBQH_R_W
:
17522 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17527 case OPC_MULQ_RS_W
:
17528 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17531 MIPS_INVAL("MASK ADDUH.QB");
17532 generate_exception_end(ctx
, EXCP_RI
);
17535 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17536 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17538 generate_exception_end(ctx
, EXCP_RI
);
17542 op2
= MASK_LX(ctx
->opcode
);
17544 #if defined(TARGET_MIPS64)
17550 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17552 default: /* Invalid */
17553 MIPS_INVAL("MASK LX");
17554 generate_exception_end(ctx
, EXCP_RI
);
17558 case OPC_ABSQ_S_PH_DSP
:
17559 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17561 case OPC_ABSQ_S_QB
:
17562 case OPC_ABSQ_S_PH
:
17564 case OPC_PRECEQ_W_PHL
:
17565 case OPC_PRECEQ_W_PHR
:
17566 case OPC_PRECEQU_PH_QBL
:
17567 case OPC_PRECEQU_PH_QBR
:
17568 case OPC_PRECEQU_PH_QBLA
:
17569 case OPC_PRECEQU_PH_QBRA
:
17570 case OPC_PRECEU_PH_QBL
:
17571 case OPC_PRECEU_PH_QBR
:
17572 case OPC_PRECEU_PH_QBLA
:
17573 case OPC_PRECEU_PH_QBRA
:
17574 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17581 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17584 MIPS_INVAL("MASK ABSQ_S.PH");
17585 generate_exception_end(ctx
, EXCP_RI
);
17589 case OPC_ADDU_QB_DSP
:
17590 op2
= MASK_ADDU_QB(ctx
->opcode
);
17593 case OPC_ADDQ_S_PH
:
17596 case OPC_ADDU_S_QB
:
17598 case OPC_ADDU_S_PH
:
17600 case OPC_SUBQ_S_PH
:
17603 case OPC_SUBU_S_QB
:
17605 case OPC_SUBU_S_PH
:
17609 case OPC_RADDU_W_QB
:
17610 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17612 case OPC_MULEU_S_PH_QBL
:
17613 case OPC_MULEU_S_PH_QBR
:
17614 case OPC_MULQ_RS_PH
:
17615 case OPC_MULEQ_S_W_PHL
:
17616 case OPC_MULEQ_S_W_PHR
:
17617 case OPC_MULQ_S_PH
:
17618 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17620 default: /* Invalid */
17621 MIPS_INVAL("MASK ADDU.QB");
17622 generate_exception_end(ctx
, EXCP_RI
);
17627 case OPC_CMPU_EQ_QB_DSP
:
17628 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17630 case OPC_PRECR_SRA_PH_W
:
17631 case OPC_PRECR_SRA_R_PH_W
:
17632 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17634 case OPC_PRECR_QB_PH
:
17635 case OPC_PRECRQ_QB_PH
:
17636 case OPC_PRECRQ_PH_W
:
17637 case OPC_PRECRQ_RS_PH_W
:
17638 case OPC_PRECRQU_S_QB_PH
:
17639 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17641 case OPC_CMPU_EQ_QB
:
17642 case OPC_CMPU_LT_QB
:
17643 case OPC_CMPU_LE_QB
:
17644 case OPC_CMP_EQ_PH
:
17645 case OPC_CMP_LT_PH
:
17646 case OPC_CMP_LE_PH
:
17647 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17649 case OPC_CMPGU_EQ_QB
:
17650 case OPC_CMPGU_LT_QB
:
17651 case OPC_CMPGU_LE_QB
:
17652 case OPC_CMPGDU_EQ_QB
:
17653 case OPC_CMPGDU_LT_QB
:
17654 case OPC_CMPGDU_LE_QB
:
17657 case OPC_PACKRL_PH
:
17658 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17660 default: /* Invalid */
17661 MIPS_INVAL("MASK CMPU.EQ.QB");
17662 generate_exception_end(ctx
, EXCP_RI
);
17666 case OPC_SHLL_QB_DSP
:
17667 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17669 case OPC_DPA_W_PH_DSP
:
17670 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17672 case OPC_DPAU_H_QBL
:
17673 case OPC_DPAU_H_QBR
:
17674 case OPC_DPSU_H_QBL
:
17675 case OPC_DPSU_H_QBR
:
17677 case OPC_DPAX_W_PH
:
17678 case OPC_DPAQ_S_W_PH
:
17679 case OPC_DPAQX_S_W_PH
:
17680 case OPC_DPAQX_SA_W_PH
:
17682 case OPC_DPSX_W_PH
:
17683 case OPC_DPSQ_S_W_PH
:
17684 case OPC_DPSQX_S_W_PH
:
17685 case OPC_DPSQX_SA_W_PH
:
17686 case OPC_MULSAQ_S_W_PH
:
17687 case OPC_DPAQ_SA_L_W
:
17688 case OPC_DPSQ_SA_L_W
:
17689 case OPC_MAQ_S_W_PHL
:
17690 case OPC_MAQ_S_W_PHR
:
17691 case OPC_MAQ_SA_W_PHL
:
17692 case OPC_MAQ_SA_W_PHR
:
17693 case OPC_MULSA_W_PH
:
17694 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17696 default: /* Invalid */
17697 MIPS_INVAL("MASK DPAW.PH");
17698 generate_exception_end(ctx
, EXCP_RI
);
17703 op2
= MASK_INSV(ctx
->opcode
);
17714 t0
= tcg_temp_new();
17715 t1
= tcg_temp_new();
17717 gen_load_gpr(t0
, rt
);
17718 gen_load_gpr(t1
, rs
);
17720 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17726 default: /* Invalid */
17727 MIPS_INVAL("MASK INSV");
17728 generate_exception_end(ctx
, EXCP_RI
);
17732 case OPC_APPEND_DSP
:
17733 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17735 case OPC_EXTR_W_DSP
:
17736 op2
= MASK_EXTR_W(ctx
->opcode
);
17740 case OPC_EXTR_RS_W
:
17742 case OPC_EXTRV_S_H
:
17744 case OPC_EXTRV_R_W
:
17745 case OPC_EXTRV_RS_W
:
17750 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17753 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17759 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17761 default: /* Invalid */
17762 MIPS_INVAL("MASK EXTR.W");
17763 generate_exception_end(ctx
, EXCP_RI
);
17767 #if defined(TARGET_MIPS64)
17768 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17769 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17770 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17771 check_insn(ctx
, INSN_LOONGSON2E
);
17772 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17774 case OPC_ABSQ_S_QH_DSP
:
17775 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17777 case OPC_PRECEQ_L_PWL
:
17778 case OPC_PRECEQ_L_PWR
:
17779 case OPC_PRECEQ_PW_QHL
:
17780 case OPC_PRECEQ_PW_QHR
:
17781 case OPC_PRECEQ_PW_QHLA
:
17782 case OPC_PRECEQ_PW_QHRA
:
17783 case OPC_PRECEQU_QH_OBL
:
17784 case OPC_PRECEQU_QH_OBR
:
17785 case OPC_PRECEQU_QH_OBLA
:
17786 case OPC_PRECEQU_QH_OBRA
:
17787 case OPC_PRECEU_QH_OBL
:
17788 case OPC_PRECEU_QH_OBR
:
17789 case OPC_PRECEU_QH_OBLA
:
17790 case OPC_PRECEU_QH_OBRA
:
17791 case OPC_ABSQ_S_OB
:
17792 case OPC_ABSQ_S_PW
:
17793 case OPC_ABSQ_S_QH
:
17794 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17802 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17804 default: /* Invalid */
17805 MIPS_INVAL("MASK ABSQ_S.QH");
17806 generate_exception_end(ctx
, EXCP_RI
);
17810 case OPC_ADDU_OB_DSP
:
17811 op2
= MASK_ADDU_OB(ctx
->opcode
);
17813 case OPC_RADDU_L_OB
:
17815 case OPC_SUBQ_S_PW
:
17817 case OPC_SUBQ_S_QH
:
17819 case OPC_SUBU_S_OB
:
17821 case OPC_SUBU_S_QH
:
17823 case OPC_SUBUH_R_OB
:
17825 case OPC_ADDQ_S_PW
:
17827 case OPC_ADDQ_S_QH
:
17829 case OPC_ADDU_S_OB
:
17831 case OPC_ADDU_S_QH
:
17833 case OPC_ADDUH_R_OB
:
17834 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17836 case OPC_MULEQ_S_PW_QHL
:
17837 case OPC_MULEQ_S_PW_QHR
:
17838 case OPC_MULEU_S_QH_OBL
:
17839 case OPC_MULEU_S_QH_OBR
:
17840 case OPC_MULQ_RS_QH
:
17841 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17843 default: /* Invalid */
17844 MIPS_INVAL("MASK ADDU.OB");
17845 generate_exception_end(ctx
, EXCP_RI
);
17849 case OPC_CMPU_EQ_OB_DSP
:
17850 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17852 case OPC_PRECR_SRA_QH_PW
:
17853 case OPC_PRECR_SRA_R_QH_PW
:
17854 /* Return value is rt. */
17855 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17857 case OPC_PRECR_OB_QH
:
17858 case OPC_PRECRQ_OB_QH
:
17859 case OPC_PRECRQ_PW_L
:
17860 case OPC_PRECRQ_QH_PW
:
17861 case OPC_PRECRQ_RS_QH_PW
:
17862 case OPC_PRECRQU_S_OB_QH
:
17863 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17865 case OPC_CMPU_EQ_OB
:
17866 case OPC_CMPU_LT_OB
:
17867 case OPC_CMPU_LE_OB
:
17868 case OPC_CMP_EQ_QH
:
17869 case OPC_CMP_LT_QH
:
17870 case OPC_CMP_LE_QH
:
17871 case OPC_CMP_EQ_PW
:
17872 case OPC_CMP_LT_PW
:
17873 case OPC_CMP_LE_PW
:
17874 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17876 case OPC_CMPGDU_EQ_OB
:
17877 case OPC_CMPGDU_LT_OB
:
17878 case OPC_CMPGDU_LE_OB
:
17879 case OPC_CMPGU_EQ_OB
:
17880 case OPC_CMPGU_LT_OB
:
17881 case OPC_CMPGU_LE_OB
:
17882 case OPC_PACKRL_PW
:
17886 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17888 default: /* Invalid */
17889 MIPS_INVAL("MASK CMPU_EQ.OB");
17890 generate_exception_end(ctx
, EXCP_RI
);
17894 case OPC_DAPPEND_DSP
:
17895 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17897 case OPC_DEXTR_W_DSP
:
17898 op2
= MASK_DEXTR_W(ctx
->opcode
);
17905 case OPC_DEXTR_R_L
:
17906 case OPC_DEXTR_RS_L
:
17908 case OPC_DEXTR_R_W
:
17909 case OPC_DEXTR_RS_W
:
17910 case OPC_DEXTR_S_H
:
17912 case OPC_DEXTRV_R_L
:
17913 case OPC_DEXTRV_RS_L
:
17914 case OPC_DEXTRV_S_H
:
17916 case OPC_DEXTRV_R_W
:
17917 case OPC_DEXTRV_RS_W
:
17918 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17923 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17925 default: /* Invalid */
17926 MIPS_INVAL("MASK EXTR.W");
17927 generate_exception_end(ctx
, EXCP_RI
);
17931 case OPC_DPAQ_W_QH_DSP
:
17932 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17934 case OPC_DPAU_H_OBL
:
17935 case OPC_DPAU_H_OBR
:
17936 case OPC_DPSU_H_OBL
:
17937 case OPC_DPSU_H_OBR
:
17939 case OPC_DPAQ_S_W_QH
:
17941 case OPC_DPSQ_S_W_QH
:
17942 case OPC_MULSAQ_S_W_QH
:
17943 case OPC_DPAQ_SA_L_PW
:
17944 case OPC_DPSQ_SA_L_PW
:
17945 case OPC_MULSAQ_S_L_PW
:
17946 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17948 case OPC_MAQ_S_W_QHLL
:
17949 case OPC_MAQ_S_W_QHLR
:
17950 case OPC_MAQ_S_W_QHRL
:
17951 case OPC_MAQ_S_W_QHRR
:
17952 case OPC_MAQ_SA_W_QHLL
:
17953 case OPC_MAQ_SA_W_QHLR
:
17954 case OPC_MAQ_SA_W_QHRL
:
17955 case OPC_MAQ_SA_W_QHRR
:
17956 case OPC_MAQ_S_L_PWL
:
17957 case OPC_MAQ_S_L_PWR
:
17962 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17964 default: /* Invalid */
17965 MIPS_INVAL("MASK DPAQ.W.QH");
17966 generate_exception_end(ctx
, EXCP_RI
);
17970 case OPC_DINSV_DSP
:
17971 op2
= MASK_INSV(ctx
->opcode
);
17982 t0
= tcg_temp_new();
17983 t1
= tcg_temp_new();
17985 gen_load_gpr(t0
, rt
);
17986 gen_load_gpr(t1
, rs
);
17988 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17994 default: /* Invalid */
17995 MIPS_INVAL("MASK DINSV");
17996 generate_exception_end(ctx
, EXCP_RI
);
18000 case OPC_SHLL_OB_DSP
:
18001 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
18004 default: /* Invalid */
18005 MIPS_INVAL("special3_legacy");
18006 generate_exception_end(ctx
, EXCP_RI
);
18011 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
18013 int rs
, rt
, rd
, sa
;
18016 rs
= (ctx
->opcode
>> 21) & 0x1f;
18017 rt
= (ctx
->opcode
>> 16) & 0x1f;
18018 rd
= (ctx
->opcode
>> 11) & 0x1f;
18019 sa
= (ctx
->opcode
>> 6) & 0x1f;
18021 op1
= MASK_SPECIAL3(ctx
->opcode
);
18025 check_insn(ctx
, ISA_MIPS32R2
);
18026 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18029 op2
= MASK_BSHFL(ctx
->opcode
);
18031 case OPC_ALIGN
... OPC_ALIGN_END
:
18033 check_insn(ctx
, ISA_MIPS32R6
);
18034 decode_opc_special3_r6(env
, ctx
);
18037 check_insn(ctx
, ISA_MIPS32R2
);
18038 gen_bshfl(ctx
, op2
, rt
, rd
);
18042 #if defined(TARGET_MIPS64)
18043 case OPC_DEXTM
... OPC_DEXT
:
18044 case OPC_DINSM
... OPC_DINS
:
18045 check_insn(ctx
, ISA_MIPS64R2
);
18046 check_mips_64(ctx
);
18047 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18050 op2
= MASK_DBSHFL(ctx
->opcode
);
18052 case OPC_DALIGN
... OPC_DALIGN_END
:
18054 check_insn(ctx
, ISA_MIPS32R6
);
18055 decode_opc_special3_r6(env
, ctx
);
18058 check_insn(ctx
, ISA_MIPS64R2
);
18059 check_mips_64(ctx
);
18060 op2
= MASK_DBSHFL(ctx
->opcode
);
18061 gen_bshfl(ctx
, op2
, rt
, rd
);
18067 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
18070 check_insn(ctx
, ASE_MT
);
18072 TCGv t0
= tcg_temp_new();
18073 TCGv t1
= tcg_temp_new();
18075 gen_load_gpr(t0
, rt
);
18076 gen_load_gpr(t1
, rs
);
18077 gen_helper_fork(t0
, t1
);
18083 check_insn(ctx
, ASE_MT
);
18085 TCGv t0
= tcg_temp_new();
18087 gen_load_gpr(t0
, rs
);
18088 gen_helper_yield(t0
, cpu_env
, t0
);
18089 gen_store_gpr(t0
, rd
);
18094 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18095 decode_opc_special3_r6(env
, ctx
);
18097 decode_opc_special3_legacy(env
, ctx
);
18102 /* MIPS SIMD Architecture (MSA) */
18103 static inline int check_msa_access(DisasContext
*ctx
)
18105 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
18106 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
18107 generate_exception_end(ctx
, EXCP_RI
);
18111 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
18112 if (ctx
->insn_flags
& ASE_MSA
) {
18113 generate_exception_end(ctx
, EXCP_MSADIS
);
18116 generate_exception_end(ctx
, EXCP_RI
);
18123 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
18125 /* generates tcg ops to check if any element is 0 */
18126 /* Note this function only works with MSA_WRLEN = 128 */
18127 uint64_t eval_zero_or_big
= 0;
18128 uint64_t eval_big
= 0;
18129 TCGv_i64 t0
= tcg_temp_new_i64();
18130 TCGv_i64 t1
= tcg_temp_new_i64();
18133 eval_zero_or_big
= 0x0101010101010101ULL
;
18134 eval_big
= 0x8080808080808080ULL
;
18137 eval_zero_or_big
= 0x0001000100010001ULL
;
18138 eval_big
= 0x8000800080008000ULL
;
18141 eval_zero_or_big
= 0x0000000100000001ULL
;
18142 eval_big
= 0x8000000080000000ULL
;
18145 eval_zero_or_big
= 0x0000000000000001ULL
;
18146 eval_big
= 0x8000000000000000ULL
;
18149 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
18150 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
18151 tcg_gen_andi_i64(t0
, t0
, eval_big
);
18152 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
18153 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
18154 tcg_gen_andi_i64(t1
, t1
, eval_big
);
18155 tcg_gen_or_i64(t0
, t0
, t1
);
18156 /* if all bits are zero then all elements are not zero */
18157 /* if some bit is non-zero then some element is zero */
18158 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
18159 tcg_gen_trunc_i64_tl(tresult
, t0
);
18160 tcg_temp_free_i64(t0
);
18161 tcg_temp_free_i64(t1
);
18164 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
18166 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18167 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18168 int64_t s16
= (int16_t)ctx
->opcode
;
18170 check_msa_access(ctx
);
18172 if (ctx
->insn_flags
& ISA_MIPS32R6
&& ctx
->hflags
& MIPS_HFLAG_BMASK
) {
18173 generate_exception_end(ctx
, EXCP_RI
);
18180 TCGv_i64 t0
= tcg_temp_new_i64();
18181 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
18182 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
18183 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
18184 tcg_gen_trunc_i64_tl(bcond
, t0
);
18185 tcg_temp_free_i64(t0
);
18192 gen_check_zero_element(bcond
, df
, wt
);
18198 gen_check_zero_element(bcond
, df
, wt
);
18199 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
18203 ctx
->btarget
= ctx
->pc
+ (s16
<< 2) + 4;
18205 ctx
->hflags
|= MIPS_HFLAG_BC
;
18206 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
18209 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
18211 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
18212 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
18213 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18214 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18216 TCGv_i32 twd
= tcg_const_i32(wd
);
18217 TCGv_i32 tws
= tcg_const_i32(ws
);
18218 TCGv_i32 ti8
= tcg_const_i32(i8
);
18220 switch (MASK_MSA_I8(ctx
->opcode
)) {
18222 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
18225 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
18228 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
18231 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
18234 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18237 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18240 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18246 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18247 if (df
== DF_DOUBLE
) {
18248 generate_exception_end(ctx
, EXCP_RI
);
18250 TCGv_i32 tdf
= tcg_const_i32(df
);
18251 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18252 tcg_temp_free_i32(tdf
);
18257 MIPS_INVAL("MSA instruction");
18258 generate_exception_end(ctx
, EXCP_RI
);
18262 tcg_temp_free_i32(twd
);
18263 tcg_temp_free_i32(tws
);
18264 tcg_temp_free_i32(ti8
);
18267 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18269 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18270 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18271 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18272 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18273 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18274 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18276 TCGv_i32 tdf
= tcg_const_i32(df
);
18277 TCGv_i32 twd
= tcg_const_i32(wd
);
18278 TCGv_i32 tws
= tcg_const_i32(ws
);
18279 TCGv_i32 timm
= tcg_temp_new_i32();
18280 tcg_gen_movi_i32(timm
, u5
);
18282 switch (MASK_MSA_I5(ctx
->opcode
)) {
18284 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18287 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18289 case OPC_MAXI_S_df
:
18290 tcg_gen_movi_i32(timm
, s5
);
18291 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18293 case OPC_MAXI_U_df
:
18294 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18296 case OPC_MINI_S_df
:
18297 tcg_gen_movi_i32(timm
, s5
);
18298 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18300 case OPC_MINI_U_df
:
18301 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18304 tcg_gen_movi_i32(timm
, s5
);
18305 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18307 case OPC_CLTI_S_df
:
18308 tcg_gen_movi_i32(timm
, s5
);
18309 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18311 case OPC_CLTI_U_df
:
18312 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18314 case OPC_CLEI_S_df
:
18315 tcg_gen_movi_i32(timm
, s5
);
18316 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18318 case OPC_CLEI_U_df
:
18319 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18323 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18324 tcg_gen_movi_i32(timm
, s10
);
18325 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18329 MIPS_INVAL("MSA instruction");
18330 generate_exception_end(ctx
, EXCP_RI
);
18334 tcg_temp_free_i32(tdf
);
18335 tcg_temp_free_i32(twd
);
18336 tcg_temp_free_i32(tws
);
18337 tcg_temp_free_i32(timm
);
18340 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18342 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18343 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18344 uint32_t df
= 0, m
= 0;
18345 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18346 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18353 if ((dfm
& 0x40) == 0x00) {
18356 } else if ((dfm
& 0x60) == 0x40) {
18359 } else if ((dfm
& 0x70) == 0x60) {
18362 } else if ((dfm
& 0x78) == 0x70) {
18366 generate_exception_end(ctx
, EXCP_RI
);
18370 tdf
= tcg_const_i32(df
);
18371 tm
= tcg_const_i32(m
);
18372 twd
= tcg_const_i32(wd
);
18373 tws
= tcg_const_i32(ws
);
18375 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18377 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18380 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18383 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18386 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18389 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18392 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18394 case OPC_BINSLI_df
:
18395 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18397 case OPC_BINSRI_df
:
18398 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18401 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18404 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18407 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18410 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18413 MIPS_INVAL("MSA instruction");
18414 generate_exception_end(ctx
, EXCP_RI
);
18418 tcg_temp_free_i32(tdf
);
18419 tcg_temp_free_i32(tm
);
18420 tcg_temp_free_i32(twd
);
18421 tcg_temp_free_i32(tws
);
18424 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18426 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18427 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18428 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18429 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18430 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18432 TCGv_i32 tdf
= tcg_const_i32(df
);
18433 TCGv_i32 twd
= tcg_const_i32(wd
);
18434 TCGv_i32 tws
= tcg_const_i32(ws
);
18435 TCGv_i32 twt
= tcg_const_i32(wt
);
18437 switch (MASK_MSA_3R(ctx
->opcode
)) {
18439 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18442 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18445 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18448 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18450 case OPC_SUBS_S_df
:
18451 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18454 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18457 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18460 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18463 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18466 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18468 case OPC_ADDS_A_df
:
18469 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18471 case OPC_SUBS_U_df
:
18472 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18475 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18478 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18481 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18484 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18487 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18490 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18492 case OPC_ADDS_S_df
:
18493 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18495 case OPC_SUBSUS_U_df
:
18496 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18499 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18502 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18505 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18508 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18511 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18514 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18516 case OPC_ADDS_U_df
:
18517 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18519 case OPC_SUBSUU_S_df
:
18520 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18523 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18526 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18529 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18532 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18535 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18537 case OPC_ASUB_S_df
:
18538 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18541 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18544 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18547 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18550 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18553 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18556 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18558 case OPC_ASUB_U_df
:
18559 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18562 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18565 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18568 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18571 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18573 case OPC_AVER_S_df
:
18574 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18577 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18580 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18583 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18586 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18588 case OPC_AVER_U_df
:
18589 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18592 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18595 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18598 case OPC_DOTP_S_df
:
18599 case OPC_DOTP_U_df
:
18600 case OPC_DPADD_S_df
:
18601 case OPC_DPADD_U_df
:
18602 case OPC_DPSUB_S_df
:
18603 case OPC_HADD_S_df
:
18604 case OPC_DPSUB_U_df
:
18605 case OPC_HADD_U_df
:
18606 case OPC_HSUB_S_df
:
18607 case OPC_HSUB_U_df
:
18608 if (df
== DF_BYTE
) {
18609 generate_exception_end(ctx
, EXCP_RI
);
18612 switch (MASK_MSA_3R(ctx
->opcode
)) {
18613 case OPC_DOTP_S_df
:
18614 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18616 case OPC_DOTP_U_df
:
18617 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18619 case OPC_DPADD_S_df
:
18620 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18622 case OPC_DPADD_U_df
:
18623 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18625 case OPC_DPSUB_S_df
:
18626 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18628 case OPC_HADD_S_df
:
18629 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18631 case OPC_DPSUB_U_df
:
18632 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18634 case OPC_HADD_U_df
:
18635 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18637 case OPC_HSUB_S_df
:
18638 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18640 case OPC_HSUB_U_df
:
18641 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18646 MIPS_INVAL("MSA instruction");
18647 generate_exception_end(ctx
, EXCP_RI
);
18650 tcg_temp_free_i32(twd
);
18651 tcg_temp_free_i32(tws
);
18652 tcg_temp_free_i32(twt
);
18653 tcg_temp_free_i32(tdf
);
18656 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18658 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
18659 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18660 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18661 TCGv telm
= tcg_temp_new();
18662 TCGv_i32 tsr
= tcg_const_i32(source
);
18663 TCGv_i32 tdt
= tcg_const_i32(dest
);
18665 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
18667 gen_load_gpr(telm
, source
);
18668 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
18671 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18672 gen_store_gpr(telm
, dest
);
18675 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
18678 MIPS_INVAL("MSA instruction");
18679 generate_exception_end(ctx
, EXCP_RI
);
18683 tcg_temp_free(telm
);
18684 tcg_temp_free_i32(tdt
);
18685 tcg_temp_free_i32(tsr
);
18688 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18691 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18692 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18693 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18695 TCGv_i32 tws
= tcg_const_i32(ws
);
18696 TCGv_i32 twd
= tcg_const_i32(wd
);
18697 TCGv_i32 tn
= tcg_const_i32(n
);
18698 TCGv_i32 tdf
= tcg_const_i32(df
);
18700 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18702 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18704 case OPC_SPLATI_df
:
18705 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18708 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18710 case OPC_COPY_S_df
:
18711 case OPC_COPY_U_df
:
18712 case OPC_INSERT_df
:
18713 #if !defined(TARGET_MIPS64)
18714 /* Double format valid only for MIPS64 */
18715 if (df
== DF_DOUBLE
) {
18716 generate_exception_end(ctx
, EXCP_RI
);
18720 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18721 case OPC_COPY_S_df
:
18722 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18724 case OPC_COPY_U_df
:
18725 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18727 case OPC_INSERT_df
:
18728 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18733 MIPS_INVAL("MSA instruction");
18734 generate_exception_end(ctx
, EXCP_RI
);
18736 tcg_temp_free_i32(twd
);
18737 tcg_temp_free_i32(tws
);
18738 tcg_temp_free_i32(tn
);
18739 tcg_temp_free_i32(tdf
);
18742 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
18744 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
18745 uint32_t df
= 0, n
= 0;
18747 if ((dfn
& 0x30) == 0x00) {
18750 } else if ((dfn
& 0x38) == 0x20) {
18753 } else if ((dfn
& 0x3c) == 0x30) {
18756 } else if ((dfn
& 0x3e) == 0x38) {
18759 } else if (dfn
== 0x3E) {
18760 /* CTCMSA, CFCMSA, MOVE.V */
18761 gen_msa_elm_3e(env
, ctx
);
18764 generate_exception_end(ctx
, EXCP_RI
);
18768 gen_msa_elm_df(env
, ctx
, df
, n
);
18771 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18773 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18774 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18775 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18776 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18777 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18779 TCGv_i32 twd
= tcg_const_i32(wd
);
18780 TCGv_i32 tws
= tcg_const_i32(ws
);
18781 TCGv_i32 twt
= tcg_const_i32(wt
);
18782 TCGv_i32 tdf
= tcg_temp_new_i32();
18784 /* adjust df value for floating-point instruction */
18785 tcg_gen_movi_i32(tdf
, df
+ 2);
18787 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18789 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18792 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18795 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18798 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18801 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18804 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18807 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18810 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18813 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18816 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18819 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18822 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18825 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18828 tcg_gen_movi_i32(tdf
, df
+ 1);
18829 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18832 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18835 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18837 case OPC_MADD_Q_df
:
18838 tcg_gen_movi_i32(tdf
, df
+ 1);
18839 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18842 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18844 case OPC_MSUB_Q_df
:
18845 tcg_gen_movi_i32(tdf
, df
+ 1);
18846 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18849 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18852 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18855 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18858 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18861 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18864 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18867 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18870 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18873 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18876 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18879 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18882 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18885 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18887 case OPC_MULR_Q_df
:
18888 tcg_gen_movi_i32(tdf
, df
+ 1);
18889 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18892 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18894 case OPC_FMIN_A_df
:
18895 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18897 case OPC_MADDR_Q_df
:
18898 tcg_gen_movi_i32(tdf
, df
+ 1);
18899 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18902 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18905 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18907 case OPC_MSUBR_Q_df
:
18908 tcg_gen_movi_i32(tdf
, df
+ 1);
18909 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18912 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18914 case OPC_FMAX_A_df
:
18915 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18918 MIPS_INVAL("MSA instruction");
18919 generate_exception_end(ctx
, EXCP_RI
);
18923 tcg_temp_free_i32(twd
);
18924 tcg_temp_free_i32(tws
);
18925 tcg_temp_free_i32(twt
);
18926 tcg_temp_free_i32(tdf
);
18929 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18931 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18932 (op & (0x7 << 18)))
18933 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18934 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18935 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18936 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18937 TCGv_i32 twd
= tcg_const_i32(wd
);
18938 TCGv_i32 tws
= tcg_const_i32(ws
);
18939 TCGv_i32 twt
= tcg_const_i32(wt
);
18940 TCGv_i32 tdf
= tcg_const_i32(df
);
18942 switch (MASK_MSA_2R(ctx
->opcode
)) {
18944 #if !defined(TARGET_MIPS64)
18945 /* Double format valid only for MIPS64 */
18946 if (df
== DF_DOUBLE
) {
18947 generate_exception_end(ctx
, EXCP_RI
);
18951 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18954 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18957 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18960 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18963 MIPS_INVAL("MSA instruction");
18964 generate_exception_end(ctx
, EXCP_RI
);
18968 tcg_temp_free_i32(twd
);
18969 tcg_temp_free_i32(tws
);
18970 tcg_temp_free_i32(twt
);
18971 tcg_temp_free_i32(tdf
);
18974 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18976 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18977 (op & (0xf << 17)))
18978 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18979 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18980 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18981 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18982 TCGv_i32 twd
= tcg_const_i32(wd
);
18983 TCGv_i32 tws
= tcg_const_i32(ws
);
18984 TCGv_i32 twt
= tcg_const_i32(wt
);
18985 /* adjust df value for floating-point instruction */
18986 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18988 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18989 case OPC_FCLASS_df
:
18990 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18992 case OPC_FTRUNC_S_df
:
18993 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18995 case OPC_FTRUNC_U_df
:
18996 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18999 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
19001 case OPC_FRSQRT_df
:
19002 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
19005 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
19008 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
19011 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
19013 case OPC_FEXUPL_df
:
19014 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
19016 case OPC_FEXUPR_df
:
19017 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
19020 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
19023 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
19025 case OPC_FTINT_S_df
:
19026 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
19028 case OPC_FTINT_U_df
:
19029 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
19031 case OPC_FFINT_S_df
:
19032 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
19034 case OPC_FFINT_U_df
:
19035 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
19039 tcg_temp_free_i32(twd
);
19040 tcg_temp_free_i32(tws
);
19041 tcg_temp_free_i32(twt
);
19042 tcg_temp_free_i32(tdf
);
19045 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
19047 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
19048 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19049 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19050 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19051 TCGv_i32 twd
= tcg_const_i32(wd
);
19052 TCGv_i32 tws
= tcg_const_i32(ws
);
19053 TCGv_i32 twt
= tcg_const_i32(wt
);
19055 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19057 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
19060 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
19063 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
19066 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
19069 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
19072 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
19075 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
19078 MIPS_INVAL("MSA instruction");
19079 generate_exception_end(ctx
, EXCP_RI
);
19083 tcg_temp_free_i32(twd
);
19084 tcg_temp_free_i32(tws
);
19085 tcg_temp_free_i32(twt
);
19088 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
19090 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19098 gen_msa_vec_v(env
, ctx
);
19101 gen_msa_2r(env
, ctx
);
19104 gen_msa_2rf(env
, ctx
);
19107 MIPS_INVAL("MSA instruction");
19108 generate_exception_end(ctx
, EXCP_RI
);
19113 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
19115 uint32_t opcode
= ctx
->opcode
;
19116 check_insn(ctx
, ASE_MSA
);
19117 check_msa_access(ctx
);
19119 switch (MASK_MSA_MINOR(opcode
)) {
19120 case OPC_MSA_I8_00
:
19121 case OPC_MSA_I8_01
:
19122 case OPC_MSA_I8_02
:
19123 gen_msa_i8(env
, ctx
);
19125 case OPC_MSA_I5_06
:
19126 case OPC_MSA_I5_07
:
19127 gen_msa_i5(env
, ctx
);
19129 case OPC_MSA_BIT_09
:
19130 case OPC_MSA_BIT_0A
:
19131 gen_msa_bit(env
, ctx
);
19133 case OPC_MSA_3R_0D
:
19134 case OPC_MSA_3R_0E
:
19135 case OPC_MSA_3R_0F
:
19136 case OPC_MSA_3R_10
:
19137 case OPC_MSA_3R_11
:
19138 case OPC_MSA_3R_12
:
19139 case OPC_MSA_3R_13
:
19140 case OPC_MSA_3R_14
:
19141 case OPC_MSA_3R_15
:
19142 gen_msa_3r(env
, ctx
);
19145 gen_msa_elm(env
, ctx
);
19147 case OPC_MSA_3RF_1A
:
19148 case OPC_MSA_3RF_1B
:
19149 case OPC_MSA_3RF_1C
:
19150 gen_msa_3rf(env
, ctx
);
19153 gen_msa_vec(env
, ctx
);
19164 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
19165 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
19166 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19167 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
19169 TCGv_i32 twd
= tcg_const_i32(wd
);
19170 TCGv taddr
= tcg_temp_new();
19171 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
19173 switch (MASK_MSA_MINOR(opcode
)) {
19175 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
19178 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
19181 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
19184 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
19187 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
19190 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
19193 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
19196 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
19200 tcg_temp_free_i32(twd
);
19201 tcg_temp_free(taddr
);
19205 MIPS_INVAL("MSA instruction");
19206 generate_exception_end(ctx
, EXCP_RI
);
static void decode_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int32_t offset;
    int rs, rt, rd, sa;
    uint32_t op, op1;
    int16_t imm;

    /* make sure instructions are on a word boundary */
    if (ctx->pc & 0x3) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_err(ctx, EXCP_AdEL, EXCP_INST_NOTAVAIL);
        return;
    }

    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->pc + 4);
        gen_set_label(l1);
    }

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
    switch (op) {
    case OPC_SPECIAL:
        decode_opc_special(env, ctx);
        break;
    case OPC_SPECIAL2:
        decode_opc_special2_legacy(env, ctx);
        break;
    case OPC_SPECIAL3:
        decode_opc_special3(env, ctx);
        break;
    case OPC_REGIMM:
        op1 = MASK_REGIMM(ctx->opcode);
        switch (op1) {
        case OPC_BLTZL: /* REGIMM branches */
        case OPC_BGEZL:
        case OPC_BLTZALL:
        case OPC_BGEZALL:
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            /* Fallthrough */
        case OPC_BLTZ:
        case OPC_BGEZ:
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            break;
        case OPC_BLTZAL:
        case OPC_BGEZAL:
            if (ctx->insn_flags & ISA_MIPS32R6) {
                if (rs == 0) {
                    /* OPC_NAL, OPC_BAL */
                    gen_compute_branch(ctx, op1, 4, 0, -1, imm << 2, 4);
                } else {
                    generate_exception_end(ctx, EXCP_RI);
                }
            } else {
                gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            }
            break;
        case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
        case OPC_TNEI:
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_trap(ctx, op1, rs, -1, imm);
            break;
        case OPC_SIGRIE:
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_SYNCI:
            check_insn(ctx, ISA_MIPS32R2);
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
            break;
        case OPC_BPOSGE32: /* MIPS DSP branch */
#if defined(TARGET_MIPS64)
        case OPC_BPOSGE64:
#endif
            check_dsp(ctx);
            gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2, 4);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DAHI:
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            if (rs != 0) {
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 32);
            }
            break;
        case OPC_DATI:
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            if (rs != 0) {
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 48);
            }
            break;
#endif
        default: /* Invalid */
            MIPS_INVAL("regimm");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_CP0:
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
        switch (op1) {
        case OPC_MFC0:
        case OPC_MTC0:
        case OPC_MFTR:
        case OPC_MTTR:
        case OPC_MFHC0:
        case OPC_MTHC0:
#if defined(TARGET_MIPS64)
        case OPC_DMFC0:
        case OPC_DMTC0:
#endif
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_MFMC0:
#ifndef CONFIG_USER_ONLY
            {
                uint32_t op2;
                TCGv t0 = tcg_temp_new();

                op2 = MASK_MFMC0(ctx->opcode);
                switch (op2) {
                case OPC_DMT:
                    check_insn(ctx, ASE_MT);
                    gen_helper_dmt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EMT:
                    check_insn(ctx, ASE_MT);
                    gen_helper_emt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVPE:
                    check_insn(ctx, ASE_MT);
                    gen_helper_dvpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EVPE:
                    check_insn(ctx, ASE_MT);
                    gen_helper_evpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_dvp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_EVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_evp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_DI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_di(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode. */
                    ctx->bstate = BS_STOP;
                    break;
                case OPC_EI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_ei(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode. */
                    ctx->bstate = BS_STOP;
                    break;
                default: /* Invalid */
                    MIPS_INVAL("mfmc0");
                    generate_exception_end(ctx, EXCP_RI);
                    break;
                }
                tcg_temp_free(t0);
            }
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_RDPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            break;
        case OPC_WRPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
            break;
        default:
            MIPS_INVAL("cp0");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_ADDI */
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_ADDIU:
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_SLTI: /* Set on less than with immediate opcode */
    case OPC_SLTIU:
        gen_slt_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
    case OPC_ORI:
    case OPC_XORI:
        gen_logic_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        break;
    /* Branch */
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BLEZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BGTZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
        if (rt == 0) {
            /* OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
        if (rt == 0) {
            /* OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BEQL:
    case OPC_BNEL:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_BEQ:
    case OPC_BNE:
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        break;
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        /* Fallthrough */
    case OPC_LWL:
    case OPC_LWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SWL:
    case OPC_SWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_SB ... OPC_SH:
    case OPC_SW:
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SC:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_CACHE:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        }
        /* Treat as NOP. */
        break;
    case OPC_PREF:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */
        break;

    /* Floating point (COP1). */
    case OPC_LWC1:
    case OPC_LDC1:
    case OPC_SWC1:
    case OPC_SDC1:
        gen_cop1_ldst(ctx, op, rt, rs, imm);
        break;
    case OPC_CP1:
        op1 = MASK_CP1(ctx->opcode);

        switch (op1) {
        case OPC_MFHC1:
        case OPC_MTHC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
        case OPC_MFC1:
        case OPC_CFC1:
        case OPC_MTC1:
        case OPC_CTC1:
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DMFC1:
        case OPC_DMTC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#endif
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_BC1EQZ */
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                       rt, imm << 2, 4);
            } else {
                /* OPC_BC1ANY2 */
                check_cop1x(ctx);
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            }
            break;
        case OPC_BC1NEZ:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                   rt, imm << 2, 4);
            break;
        case OPC_BC1ANY4:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_cop1x(ctx);
            check_insn(ctx, ASE_MIPS3D);
            /* fall through */
        case OPC_BC1:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            break;
        case OPC_PS_FMT:
            check_ps(ctx);
            /* fall through */
        case OPC_S_FMT:
        case OPC_D_FMT:
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                       (imm >> 8) & 0x7);
            break;
        case OPC_W_FMT:
        case OPC_L_FMT:
        {
            int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                switch (r6_op) {
                case R6_OPC_CMP_AF_S:
                case R6_OPC_CMP_UN_S:
                case R6_OPC_CMP_EQ_S:
                case R6_OPC_CMP_UEQ_S:
                case R6_OPC_CMP_LT_S:
                case R6_OPC_CMP_ULT_S:
                case R6_OPC_CMP_LE_S:
                case R6_OPC_CMP_ULE_S:
                case R6_OPC_CMP_SAF_S:
                case R6_OPC_CMP_SUN_S:
                case R6_OPC_CMP_SEQ_S:
                case R6_OPC_CMP_SEUQ_S:
                case R6_OPC_CMP_SLT_S:
                case R6_OPC_CMP_SULT_S:
                case R6_OPC_CMP_SLE_S:
                case R6_OPC_CMP_SULE_S:
                case R6_OPC_CMP_OR_S:
                case R6_OPC_CMP_UNE_S:
                case R6_OPC_CMP_NE_S:
                case R6_OPC_CMP_SOR_S:
                case R6_OPC_CMP_SUNE_S:
                case R6_OPC_CMP_SNE_S:
                    gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    break;
                case R6_OPC_CMP_AF_D:
                case R6_OPC_CMP_UN_D:
                case R6_OPC_CMP_EQ_D:
                case R6_OPC_CMP_UEQ_D:
                case R6_OPC_CMP_LT_D:
                case R6_OPC_CMP_ULT_D:
                case R6_OPC_CMP_LE_D:
                case R6_OPC_CMP_ULE_D:
                case R6_OPC_CMP_SAF_D:
                case R6_OPC_CMP_SUN_D:
                case R6_OPC_CMP_SEQ_D:
                case R6_OPC_CMP_SEUQ_D:
                case R6_OPC_CMP_SLT_D:
                case R6_OPC_CMP_SULT_D:
                case R6_OPC_CMP_SLE_D:
                case R6_OPC_CMP_SULE_D:
                case R6_OPC_CMP_OR_D:
                case R6_OPC_CMP_UNE_D:
                case R6_OPC_CMP_NE_D:
                case R6_OPC_CMP_SOR_D:
                case R6_OPC_CMP_SUNE_D:
                case R6_OPC_CMP_SNE_D:
                    gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    break;
                default:
                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                               rt, rd, sa, (imm >> 8) & 0x7);
                    break;
                }
            } else {
                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                           (imm >> 8) & 0x7);
            }
            break;
        }
        case OPC_BZ_V:
        case OPC_BNZ_V:
        case OPC_BZ_B:
        case OPC_BZ_H:
        case OPC_BZ_W:
        case OPC_BZ_D:
        case OPC_BNZ_B:
        case OPC_BNZ_H:
        case OPC_BNZ_W:
        case OPC_BNZ_D:
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            break;
        default:
            MIPS_INVAL("cp1");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rs != 0) {
                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
            } else {
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);
            }
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
    case OPC_CP2:
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        break;
    case OPC_CP3:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
            switch (op1) {
            case OPC_LUXC1:
            case OPC_SUXC1:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_LWXC1:
            case OPC_LDXC1:
            case OPC_SWXC1:
            case OPC_SDXC1:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                break;
            case OPC_PREFX:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                break;
            case OPC_ALNV_PS:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_MADD_S:
            case OPC_MADD_D:
            case OPC_MADD_PS:
            case OPC_MSUB_S:
            case OPC_MSUB_D:
            case OPC_MSUB_PS:
            case OPC_NMADD_S:
            case OPC_NMADD_D:
            case OPC_NMADD_PS:
            case OPC_NMSUB_S:
            case OPC_NMSUB_D:
            case OPC_NMSUB_PS:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                break;
            default:
                MIPS_INVAL("cp3");
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
    case OPC_LLD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_LWU:
    case OPC_LD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_SD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SCD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_DADDI */
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_DADDIU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
#else
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            /* OPC_DAUI */
            check_mips_64(ctx);
            if (rs == 0) {
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
                tcg_temp_free(t0);
            }
#else
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
#endif
        } else {
            /* OPC_JALX */
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        }
        break;
    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */
        gen_msa(env, ctx);
        break;
    case OPC_PCREL:
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);
        break;
    default: /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
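
/*
 * Main translation loop: fetch and translate one guest instruction at a
 * time (32-bit MIPS, microMIPS or MIPS16e, depending on the hflags and ASE
 * bits) until the decoder stops the block, a branch is emitted, the next
 * page boundary or instruction budget is reached, or the TCG op buffer
 * fills up.
 */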
void gen_intermediate_code(CPUMIPSState *env, struct TranslationBlock *tb)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    DisasContext ctx;
    target_ulong pc_start;
    target_ulong next_page_start;
    int num_insns;
    int max_insns;
    int insn_bytes;
    int is_slot;

    pc_start = tb->pc;
    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    ctx.pc = pc_start;
    ctx.saved_pc = -1;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;
    ctx.tb = tb;
    ctx.bstate = BS_NONE;
    ctx.btarget = 0;
    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx.cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx.vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx.mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx.nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx.abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
#else
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
#endif
    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    gen_tb_start(tb);
    while (ctx.bstate == BS_NONE) {
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);
        num_insns++;

        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order to for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing. */
            ctx.pc += 4;
            goto done_generating;
        }

        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            insn_bytes = 4;
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
        } else {
            generate_exception_end(&ctx, EXCP_RI);
            break;
        }

        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* force to generate branch as there is neither delay nor
                   forbidden slot */
                is_slot = 1;
            }
            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */
                is_slot = 1;
            }
        }
        if (is_slot) {
            gen_branch(&ctx, insn_bytes);
        }
        ctx.pc += insn_bytes;

        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {
            break;
        }

        if (ctx.pc >= next_page_start) {
            break;
        }

        if (tcg_op_buf_full()) {
            break;
        }

        if (num_insns >= max_insns)
            break;

        if (singlestep)
            break;
    }
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_NONE:
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            tcg_gen_exit_tb(0);
            break;
        case BS_BRANCH:
        default:
            break;
        }
    }
done_generating:
    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    LOG_DISAS("\n");
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log_lock();
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
        qemu_log("\n");
        qemu_log_unlock();
    }
#endif
}

static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
                           int flags)
{
    int i;
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
    do {                                                                \
        if (is_fpu64)                                                   \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu: %13g\n",                 \
                        (fp)->w[FP_ENDIAN_IDX], (fp)->d,                \
                        (double)(fp)->fd,                               \
                        (double)(fp)->fs[FP_ENDIAN_IDX],                \
                        (double)(fp)->fs[!FP_ENDIAN_IDX]);              \
        else {                                                          \
            fpr_t tmp;                                                  \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];              \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];       \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu:%13g\n",                  \
                        tmp.w[FP_ENDIAN_IDX], tmp.d,                    \
                        (double)tmp.fd,                                 \
                        (double)tmp.fs[FP_ENDIAN_IDX],                  \
                        (double)tmp.fs[!FP_ENDIAN_IDX]);                \
        }                                                               \
    } while (0)

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#undef printfpr
}

void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                         int flags)
{
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    int i;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0)
            cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        if ((i & 3) == 3)
            cpu_fprintf(f, "\n");
    }

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                PRIx64 "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
}
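
/*
 * mips_tcg_init() registers the TCG globals (general-purpose registers,
 * MSA/FPU registers, PC, HI/LO accumulators, DSPControl, branch state and
 * the FP control registers) that are backed by fields of CPUMIPSState.
 */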
void mips_tcg_init(void)
{
    int i;
    static int inited;

    /* Initialize various static tables. */
    if (inited)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    tcg_ctx.tcg_env = cpu_env;

    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
                                        regnames[i]);

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        msa_wr_d[i * 2] =
                tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
                tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
    }

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
                                       "fcr31");

    inited = 1;
}

#include "translate_init.c"
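
/*
 * cpu_mips_init() looks up the CPU model definition by name, creates the
 * CPU object, sets the default exception base and initialises MMU, FPU and
 * MVP state before realizing the object.
 */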
MIPSCPU *cpu_mips_init(const char *cpu_model)
{
    MIPSCPU *cpu;
    CPUMIPSState *env;
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    if (!def)
        return NULL;
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env = &cpu->env;
    env->cpu_model = def;
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
#endif
    fpu_init(env, def);
    mvp_init(env, def);

    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);

    return cpu;
}

bool cpu_supports_cps_smp(const char *cpu_model)
{
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);
    if (!def) {
        return false;
    }

    return (def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
}

bool cpu_supports_isa(const char *cpu_model, unsigned int isa)
{
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);
    if (!def) {
        return false;
    }

    return (def->insn_flags & isa) != 0;
}

void cpu_set_exception_base(int vp_index, target_ulong address)
{
    MIPSCPU *vp = MIPS_CPU(qemu_get_cpu(vp_index));
    vp->env.exception_base = address;
}
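
/*
 * cpu_state_reset() reloads the CP0 and FPU configuration registers from
 * the static cpu_model definition and puts the CPU back into its
 * architectural post-reset state.
 */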
void cpu_state_reset(CPUMIPSState *env)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
#endif
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    }
#endif
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31_rw_bitmask = env->cpu_model->CP1_fcr31_rw_bitmask;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# endif
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
# endif
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers. */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    }
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
    }
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    }
# endif
#else
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
    } else {
        env->CP0_ErrorEPC = env->active_tc.PC;
    }
    env->active_tc.PC = env->exception_base;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (kvm_enabled()) {
        env->CP0_EBase |= 0x40000000;
    } else {
        env->CP0_EBase |= 0x80000000;
    }
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    }
    env->CP0_EntryHi_ASID_mask = (env->CP0_Config4 & (1 << CP0C4_AE)) ?
                                 0x3ff : 0xff;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    {
        int i;

        for (i = 0; i < 7; i++) {
            env->CP0_WatchLo[i] = 0;
            env->CP0_WatchHi[i] = 0x80000000;
        }
        env->CP0_WatchLo[7] = 0;
        env->CP0_WatchHi[7] = 0;
    }
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        int i;

        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;
        cs->halted = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            cs->halted = 0;
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }
#endif
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    }

    /* MSA */
    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
        msa_reset(env);
    }

    compute_hflags(env);
    restore_fp_status(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
    }
}

void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        env->btarget = data