/*
 * MIPS32 emulation for qemu: main translation routines.
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "exec/cpu_ldst.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "sysemu/kvm.h"
#include "exec/semihost.h"

#include "target/mips/trace.h"
#include "trace-tcg.h"

#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
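/*
 * Every MIPS instruction is a single 32-bit word whose major opcode lives in
 * bits 31..26.  MASK_OP_MAJOR() keeps only that field, so a fetched
 * instruction word can be compared directly against the OPC_* constants
 * below.  For example, ADDIU encodes major opcode 0x09, so for any ADDIU
 * word insn we have MASK_OP_MAJOR(insn) == (0x09 << 26) == OPC_ADDIU.
 */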
enum {
    /* indirect opcode tables */
    OPC_SPECIAL = (0x00 << 26),
    OPC_REGIMM = (0x01 << 26),
    OPC_CP0 = (0x10 << 26),
    OPC_CP1 = (0x11 << 26),
    OPC_CP2 = (0x12 << 26),
    OPC_CP3 = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI = (0x08 << 26),
    OPC_ADDIU = (0x09 << 26),
    OPC_SLTI = (0x0A << 26),
    OPC_SLTIU = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI = (0x0C << 26),
    OPC_ORI = (0x0D << 26),
    OPC_XORI = (0x0E << 26),
    OPC_LUI = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI = (0x18 << 26),
    OPC_DADDIU = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL = (0x03 << 26),
    OPC_BEQ = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL = (0x14 << 26),
    OPC_BNE = (0x05 << 26),
    OPC_BNEL = (0x15 << 26),
    OPC_BLEZ = (0x06 << 26),
    OPC_BLEZL = (0x16 << 26),
    OPC_BGTZ = (0x07 << 26),
    OPC_BGTZL = (0x17 << 26),
    OPC_JALX = (0x1D << 26),
    OPC_DAUI = (0x1D << 26),
    OPC_LDL = (0x1A << 26),
    OPC_LDR = (0x1B << 26),
    OPC_LB = (0x20 << 26),
    OPC_LH = (0x21 << 26),
    OPC_LWL = (0x22 << 26),
    OPC_LW = (0x23 << 26),
    OPC_LWPC = OPC_LW | 0x5,
    OPC_LBU = (0x24 << 26),
    OPC_LHU = (0x25 << 26),
    OPC_LWR = (0x26 << 26),
    OPC_LWU = (0x27 << 26),
    OPC_SB = (0x28 << 26),
    OPC_SH = (0x29 << 26),
    OPC_SWL = (0x2A << 26),
    OPC_SW = (0x2B << 26),
    OPC_SDL = (0x2C << 26),
    OPC_SDR = (0x2D << 26),
    OPC_SWR = (0x2E << 26),
    OPC_LL = (0x30 << 26),
    OPC_LLD = (0x34 << 26),
    OPC_LD = (0x37 << 26),
    OPC_LDPC = OPC_LD | 0x5,
    OPC_SC = (0x38 << 26),
    OPC_SCD = (0x3C << 26),
    OPC_SD = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1 = (0x31 << 26),
    OPC_LWC2 = (0x32 << 26),
    OPC_LDC1 = (0x35 << 26),
    OPC_LDC2 = (0x36 << 26),
    OPC_SWC1 = (0x39 << 26),
    OPC_SWC2 = (0x3A << 26),
    OPC_SDC1 = (0x3D << 26),
    OPC_SDC2 = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC = (0x06 << 26),
    OPC_BGEZALC = (0x06 << 26),
    OPC_BGEUC = (0x06 << 26),
    OPC_BGTZALC = (0x07 << 26),
    OPC_BLTZALC = (0x07 << 26),
    OPC_BLTUC = (0x07 << 26),
    OPC_BOVC = (0x08 << 26),
    OPC_BEQZALC = (0x08 << 26),
    OPC_BEQC = (0x08 << 26),
    OPC_BLEZC = (0x16 << 26),
    OPC_BGEZC = (0x16 << 26),
    OPC_BGEC = (0x16 << 26),
    OPC_BGTZC = (0x17 << 26),
    OPC_BLTZC = (0x17 << 26),
    OPC_BLTC = (0x17 << 26),
    OPC_BNVC = (0x18 << 26),
    OPC_BNEZALC = (0x18 << 26),
    OPC_BNEC = (0x18 << 26),
    OPC_BC = (0x32 << 26),
    OPC_BEQZC = (0x36 << 26),
    OPC_JIC = (0x36 << 26),
    OPC_BALC = (0x3A << 26),
    OPC_BNEZC = (0x3E << 26),
    OPC_JIALC = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE = (0x2F << 26),
    OPC_PREF = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL = (0x3B << 26),
};
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0 */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR = OPC_SRL | (1 << 21),
    OPC_SRA = 0x03 | OPC_SPECIAL,
    OPC_SLLV = 0x04 | OPC_SPECIAL,
    OPC_SRLV = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV = OPC_SRLV | (1 << 6),
    OPC_SRAV = 0x07 | OPC_SPECIAL,
    OPC_DSLLV = 0x14 | OPC_SPECIAL,
    OPC_DSRLV = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV = OPC_DSRLV | (1 << 6),
    OPC_DSRAV = 0x17 | OPC_SPECIAL,
    OPC_DSLL = 0x38 | OPC_SPECIAL,
    OPC_DSRL = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR = OPC_DSRL | (1 << 21),
    OPC_DSRA = 0x3B | OPC_SPECIAL,
    OPC_DSLL32 = 0x3C | OPC_SPECIAL,
    OPC_DSRL32 = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32 = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32 = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT = 0x18 | OPC_SPECIAL,
    OPC_MULTU = 0x19 | OPC_SPECIAL,
    OPC_DIV = 0x1A | OPC_SPECIAL,
    OPC_DIVU = 0x1B | OPC_SPECIAL,
    OPC_DMULT = 0x1C | OPC_SPECIAL,
    OPC_DMULTU = 0x1D | OPC_SPECIAL,
    OPC_DDIV = 0x1E | OPC_SPECIAL,
    OPC_DDIVU = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD = 0x20 | OPC_SPECIAL,
    OPC_ADDU = 0x21 | OPC_SPECIAL,
    OPC_SUB = 0x22 | OPC_SPECIAL,
    OPC_SUBU = 0x23 | OPC_SPECIAL,
    OPC_AND = 0x24 | OPC_SPECIAL,
    OPC_OR = 0x25 | OPC_SPECIAL,
    OPC_XOR = 0x26 | OPC_SPECIAL,
    OPC_NOR = 0x27 | OPC_SPECIAL,
    OPC_SLT = 0x2A | OPC_SPECIAL,
    OPC_SLTU = 0x2B | OPC_SPECIAL,
    OPC_DADD = 0x2C | OPC_SPECIAL,
    OPC_DADDU = 0x2D | OPC_SPECIAL,
    OPC_DSUB = 0x2E | OPC_SPECIAL,
    OPC_DSUBU = 0x2F | OPC_SPECIAL,

    OPC_JR = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR = 0x09 | OPC_SPECIAL, /* Also JALR.HB */

    OPC_TGE = 0x30 | OPC_SPECIAL,
    OPC_TGEU = 0x31 | OPC_SPECIAL,
    OPC_TLT = 0x32 | OPC_SPECIAL,
    OPC_TLTU = 0x33 | OPC_SPECIAL,
    OPC_TEQ = 0x34 | OPC_SPECIAL,
    OPC_TNE = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI = 0x10 | OPC_SPECIAL,
    OPC_MTHI = 0x11 | OPC_SPECIAL,
    OPC_MFLO = 0x12 | OPC_SPECIAL,
    OPC_MTLO = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ = 0x0A | OPC_SPECIAL,
    OPC_MOVN = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ = 0x37 | OPC_SPECIAL,

    OPC_MOVCI = 0x01 | OPC_SPECIAL,

    OPC_PMON = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL = 0x0C | OPC_SPECIAL,
    OPC_BREAK = 0x0D | OPC_SPECIAL,
    OPC_SPIM = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};

/* R6 Multiply and Divide instructions have the same Opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op)   (MASK_SPECIAL(op) | (op & (0x7ff)))

enum {
    R6_OPC_MUL = OPC_MULT | (2 << 6),
    R6_OPC_MUH = OPC_MULT | (3 << 6),
    R6_OPC_MULU = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU = OPC_MULTU | (3 << 6),
    R6_OPC_DIV = OPC_DIV | (2 << 6),
    R6_OPC_MOD = OPC_DIV | (3 << 6),
    R6_OPC_DIVU = OPC_DIVU | (2 << 6),
    R6_OPC_MODU = OPC_DIVU | (3 << 6),

    R6_OPC_DMUL = OPC_DMULT | (2 << 6),
    R6_OPC_DMUH = OPC_DMULT | (3 << 6),
    R6_OPC_DMULU = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV = OPC_DDIV | (2 << 6),
    R6_OPC_DMOD = OPC_DDIV | (3 << 6),
    R6_OPC_DDIVU = OPC_DDIVU | (2 << 6),
    R6_OPC_DMODU = OPC_DDIVU | (3 << 6),

    R6_OPC_CLZ = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP = 0x0e | OPC_SPECIAL,

    OPC_LSA = 0x05 | OPC_SPECIAL,
    OPC_DLSA = 0x15 | OPC_SPECIAL,
};
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};

/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)   MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI = (0x1e << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD = 0x00 | OPC_SPECIAL2,
    OPC_MADDU = 0x01 | OPC_SPECIAL2,
    OPC_MUL = 0x02 | OPC_SPECIAL2,
    OPC_MSUB = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F = 0x1f | OPC_SPECIAL2,

    OPC_CLZ = 0x20 | OPC_SPECIAL2,
    OPC_CLO = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ = 0x24 | OPC_SPECIAL2,
    OPC_DCLO = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP = 0x3F | OPC_SPECIAL2,
};

/* Special3 opcodes */
#define MASK_SPECIAL3(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU = 0x02 | OPC_SPECIAL3,
    OPC_DEXT = 0x03 | OPC_SPECIAL3,
    OPC_INS = 0x04 | OPC_SPECIAL3,
    OPC_DINSM = 0x05 | OPC_SPECIAL3,
    OPC_DINSU = 0x06 | OPC_SPECIAL3,
    OPC_DINS = 0x07 | OPC_SPECIAL3,
    OPC_FORK = 0x08 | OPC_SPECIAL3,
    OPC_YIELD = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP = 0x3C | OPC_SPECIAL3,

    R6_OPC_PREF = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)   MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};

#define MASK_DBSHFL(op)   MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};

/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};

#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX = (0x08 << 6) | OPC_LX_DSP,
};
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH = (0x1E << 6) | OPC_ADDU_QB_DSP,
};

#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};

#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W = (0x17 << 6) | OPC_SHLL_QB_DSP,
};

#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};
#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};

#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN = (0x10 << 6) | OPC_APPEND_DSP,
};

#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP = (0x12 << 6) | OPC_EXTR_W_DSP,
};
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};

#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB = (0x1A << 6) | OPC_ADDU_OB_DSP,
};
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};

#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN = (0x10 << 6) | OPC_DAPPEND_DSP,
};

#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV = (0x1B << 6) | OPC_DEXTR_W_DSP,
};
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};

#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};

#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH = (0x19 << 6) | OPC_SHLL_OB_DSP,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)   MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0 = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0 = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0 = (0x02 << 21) | OPC_CP0,
    OPC_MTC0 = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0 = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0 = (0x06 << 21) | OPC_CP0,
    OPC_MFTR = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0 = (0x0B << 21) | OPC_CP0,
    OPC_MTTR = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR = (0x0E << 21) | OPC_CP0,
    OPC_C0 = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST = (0x1F << 21) | OPC_CP0,
};

#define MASK_MFMC0(op)   MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_DVP = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
    OPC_EVP = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)   MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR = 0x01 | OPC_C0,
    OPC_TLBWI = 0x02 | OPC_C0,
    OPC_TLBINV = 0x03 | OPC_C0,
    OPC_TLBINVF = 0x04 | OPC_C0,
    OPC_TLBWR = 0x06 | OPC_C0,
    OPC_TLBP = 0x08 | OPC_C0,
    OPC_RFE = 0x10 | OPC_C0,
    OPC_ERET = 0x18 | OPC_C0,
    OPC_DERET = 0x1F | OPC_C0,
    OPC_WAIT = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)   MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S = 16,  /* single fp */
    FMT_D = 17,  /* double fp */
    FMT_E = 18,  /* extended fp */
    FMT_Q = 19,  /* quad fp */
    FMT_W = 20,  /* 32-bit fixed */
    FMT_L = 21,  /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};

enum {
    OPC_MFC1 = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1 = (0x01 << 21) | OPC_CP1,
    OPC_CFC1 = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1 = (0x03 << 21) | OPC_CP1,
    OPC_MTC1 = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1 = (0x05 << 21) | OPC_CP1,
    OPC_CTC1 = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1 = (0x07 << 21) | OPC_CP1,
    OPC_BC1 = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D = (0x1F << 21) | OPC_CP1,
};

#define MASK_CP1_FUNC(op)   MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)        MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F = (0x00 << 16) | OPC_BC1,
    OPC_BC1T = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)   MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2 = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2 = (0x01 << 21) | OPC_CP2,
    OPC_CFC2 = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2 = (0x03 << 21) | OPC_CP2,
    OPC_MTC2 = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2 = (0x05 << 21) | OPC_CP2,
    OPC_CTC2 = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2 = (0x07 << 21) | OPC_CP2,
    OPC_BC2 = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ = (0x0D << 21) | OPC_CP2,
};

#define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2 = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2 = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2 = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0 = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1 = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2 = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3 = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2 = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2 = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2 = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2 = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2 = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2 = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2 = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2 = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2 = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2 = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2 = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2 = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2 = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2 = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2 = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2 = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2 = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2 = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2 = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB = (29 << 21) | (0x0F) | OPC_CP2,
};
#define MASK_CP3(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1 = 0x00 | OPC_CP3,
    OPC_LDXC1 = 0x01 | OPC_CP3,
    OPC_LUXC1 = 0x05 | OPC_CP3,
    OPC_SWXC1 = 0x08 | OPC_CP3,
    OPC_SDXC1 = 0x09 | OPC_CP3,
    OPC_SUXC1 = 0x0D | OPC_CP3,
    OPC_PREFX = 0x0F | OPC_CP3,
    OPC_ALNV_PS = 0x1E | OPC_CP3,
    OPC_MADD_S = 0x20 | OPC_CP3,
    OPC_MADD_D = 0x21 | OPC_CP3,
    OPC_MADD_PS = 0x26 | OPC_CP3,
    OPC_MSUB_S = 0x28 | OPC_CP3,
    OPC_MSUB_D = 0x29 | OPC_CP3,
    OPC_MSUB_PS = 0x2E | OPC_CP3,
    OPC_NMADD_S = 0x30 | OPC_CP3,
    OPC_NMADD_D = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S = 0x38 | OPC_CP3,
    OPC_NMSUB_D = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
#define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
enum {
    OPC_MSA_I8_00 = 0x00 | OPC_MSA,
    OPC_MSA_I8_01 = 0x01 | OPC_MSA,
    OPC_MSA_I8_02 = 0x02 | OPC_MSA,
    OPC_MSA_I5_06 = 0x06 | OPC_MSA,
    OPC_MSA_I5_07 = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09 = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F = 0x0F | OPC_MSA,
    OPC_MSA_3R_10 = 0x10 | OPC_MSA,
    OPC_MSA_3R_11 = 0x11 | OPC_MSA,
    OPC_MSA_3R_12 = 0x12 | OPC_MSA,
    OPC_MSA_3R_13 = 0x13 | OPC_MSA,
    OPC_MSA_3R_14 = 0x14 | OPC_MSA,
    OPC_MSA_3R_15 = 0x15 | OPC_MSA,
    OPC_MSA_ELM = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C = 0x1C | OPC_MSA,
    OPC_MSA_VEC = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B = (0x20) | OPC_MSA,
    OPC_LD_H = (0x21) | OPC_MSA,
    OPC_LD_W = (0x22) | OPC_MSA,
    OPC_LD_D = (0x23) | OPC_MSA,
    OPC_ST_B = (0x24) | OPC_MSA,
    OPC_ST_H = (0x25) | OPC_MSA,
    OPC_ST_W = (0x26) | OPC_MSA,
    OPC_ST_D = (0x27) | OPC_MSA,
};
enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df = (0x0F << 17) | OPC_MSA_2RF,
    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df = (0x7 << 23) | OPC_MSA_BIT_09,
};
/* global register indices */
static TCGv_env cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];

#include "exec/gen-icount.h"
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
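/*
 * Each gen_helper_<N>e<M>i wrapper above turns a trailing C immediate into a
 * temporary TCGv_i32, invokes the named helper with cpu_env plus the
 * remaining TCG arguments, and then frees the temporary.  With a
 * hypothetical helper name "foo", gen_helper_0e1i(foo, t0, 4) expands to
 * roughly: tmp = tcg_const_i32(4); gen_helper_foo(cpu_env, t0, tmp);
 * tcg_temp_free_i32(tmp);
 */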
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    int mem_idx;
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

enum {
    BS_NONE = 0,   /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP = 1,   /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP = 3,   /* We reached an exception condition */
};
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
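/*
 * Shadow register addressing above: CP0_SRSCtl.PSS selects the previous
 * shadow set, and the generated code turns that index into a byte offset of
 * PSS * 32 * sizeof(target_ulong) plus the per-register offset.  This
 * presumes the shadow GPR banks are laid out as consecutive arrays of 32
 * target_ulong values reachable at that offset from cpu_env.
 */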
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
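/*
 * PC and hflags are only synchronised to the TCG globals lazily: the
 * translator keeps working copies in DisasContext (saved_pc/saved_hflags
 * track what has already been emitted) and save_cpu_state() flushes them
 * before anything that may raise an exception or otherwise needs an exact
 * CPU state.
 */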
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
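/*
 * generate_exception() only emits the raise_exception helper call, while
 * generate_exception_err() (and therefore generate_exception_end()) also
 * flushes PC/hflags via save_cpu_state() and marks the block as ending in an
 * exception (BS_EXCP), so translation of the current block stops there.
 */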
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
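/*
 * With Status.FR = 0 (MIPS_HFLAG_F64 clear) the 64-bit FPRs are modelled as
 * even/odd pairs of 32-bit halves, which is why the 64-bit load above
 * concatenates fpu_f64[reg & ~1] and fpu_f64[reg | 1], and the 64-bit store
 * deposits the low and high words into the two registers separately.
 */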
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}

/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}

/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}
/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by the CP0 Status register MX(24) bit. */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has the corresponding flag set, which indicates that the instruction
   has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the 64-bit paired-single (PS) floating point
   data type. */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use. */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_ps(ctx);                                                        \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case  0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break; \
    case  1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break; \
    case  2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break; \
    case  3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break; \
    case  4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break; \
    case  5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break; \
    case  6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break; \
    case  7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break; \
    case  8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break; \
    case  9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break; \
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc);  break; \
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc);  break; \
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);   break; \
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc);  break; \
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);   break; \
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc);  break; \
    default: abort();                                                         \
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}

FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#undef FOP_CONDS
#define FOP_CONDNS(fmt, ifmt, bits, STORE)                                    \
static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n,              \
                                      int ft, int fs, int fd)                 \
{                                                                             \
    TCGv_i ## bits fp0 = tcg_temp_new_i ## bits();                            \
    TCGv_i ## bits fp1 = tcg_temp_new_i ## bits();                            \
    if (ifmt == FMT_D) {                                                      \
        check_cp1_registers(ctx, fs | ft | fd);                               \
    }                                                                         \
    gen_ldcmp_fpr ## bits(ctx, fp0, fs);                                      \
    gen_ldcmp_fpr ## bits(ctx, fp1, ft);                                      \
    switch (n) {                                                              \
    case  0: gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1);   break; \
    case  1: gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1);   break; \
    case  2: gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1);   break; \
    case  3: gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1);  break; \
    case  4: gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1);   break; \
    case  5: gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1);  break; \
    case  6: gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1);   break; \
    case  7: gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1);  break; \
    case  8: gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1);  break; \
    case  9: gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1);  break; \
    case 10: gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1);  break; \
    case 11: gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); break; \
    case 12: gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1);  break; \
    case 13: gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); break; \
    case 14: gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1);  break; \
    case 15: gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); break; \
    case 17: gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1);   break; \
    case 18: gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1);  break; \
    case 19: gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1);   break; \
    case 25: gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1);  break; \
    case 26: gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); break; \
    case 27: gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1);  break; \
    default: abort();                                                         \
    }                                                                         \
    STORE;                                                                    \
    tcg_temp_free_i ## bits (fp0);                                            \
    tcg_temp_free_i ## bits (fp1);                                            \
}

FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))
#undef FOP_CONDNS
#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
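/*
 * For the R6 CMP.cond.fmt family generated by FOP_CONDNS, the comparison
 * helpers leave an all-ones / all-zeros mask in fp0 and the STORE argument
 * writes that mask back to register fd, whereas the pre-R6 C.cond.fmt
 * helpers emitted by FOP_CONDS set the FCC[cc] condition flag instead of
 * producing a register result.
 */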
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx);                        \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#undef OP_LD_ATOMIC

#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    TCGLabel *l1 = gen_new_label();                                          \
    TCGLabel *l2 = gen_new_label();                                          \
    tcg_gen_andi_tl(t0, arg2, almask);                                       \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                              \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));      \
    generate_exception(ctx, EXCP_AdES);                                      \
    gen_set_label(l1);                                                       \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));              \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                            \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                        \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));               \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));          \
    generate_exception_end(ctx, EXCP_SC);                                    \
    gen_set_label(l2);                                                       \
    tcg_gen_movi_tl(t0, 0);                                                  \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx);                     \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
#undef OP_ST_ATOMIC
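/*
 * In the CONFIG_USER_ONLY variant of OP_ST_ATOMIC above, SC/SCD first check
 * the address against almask (0x3 for the 32-bit word, 0x7 for the 64-bit
 * doubleword) and raise AdES on a misaligned address; a matching lladdr then
 * defers the actual conditional store to the EXCP_SC exception path, while a
 * mismatch simply writes 0 (failure) into rt.
 */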
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
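/*
 * gen_base_offset_addr() picks the cheapest form for the effective address:
 * a constant when base is $zero, a plain register copy when the offset is
 * zero, and an add (with 32-bit wrapping handled by gen_op_addr_add) only
 * when both parts are present.
 */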
static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
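/*
 * pc_relative_pc() supplies the base address for PC-relative loads: when the
 * current instruction sits in a branch delay slot (MIPS_HFLAG_BMASK set) the
 * base is backed up by 2 or 4 bytes depending on MIPS_HFLAG_BDS16, and the
 * result is then word-aligned.
 */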
static void gen_ld(DisasContext *ctx, uint32_t opc,
                   int rt, int base, int16_t offset)
{
    TCGv t0, t1, t2;

    if (rt == 0 && ctx->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F)) {
        /* The Loongson CPU uses a load to the zero register as a prefetch.
           We emulate it as a NOP.  On other CPUs we must perform the
           actual memory access. */
        return;
    }
2134 t0
= tcg_temp_new();
2135 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2138 #if defined(TARGET_MIPS64)
2140 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2141 ctx
->default_tcg_memop_mask
);
2142 gen_store_gpr(t0
, rt
);
2145 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2146 ctx
->default_tcg_memop_mask
);
2147 gen_store_gpr(t0
, rt
);
2151 op_ld_lld(t0
, t0
, ctx
);
2152 gen_store_gpr(t0
, rt
);
2155 t1
= tcg_temp_new();
2156 /* Do a byte access to possibly trigger a page
2157 fault with the unaligned address. */
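            /* The sequence that follows emulates the unaligned load by
               reading the aligned doubleword that contains the address,
               shifting it into position, and merging it into the old rt
               value under a mask built from the low address bits, so only
               the architecturally selected bytes are replaced. */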
2158 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2159 tcg_gen_andi_tl(t1
, t0
, 7);
2160 #ifndef TARGET_WORDS_BIGENDIAN
2161 tcg_gen_xori_tl(t1
, t1
, 7);
2163 tcg_gen_shli_tl(t1
, t1
, 3);
2164 tcg_gen_andi_tl(t0
, t0
, ~7);
2165 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2166 tcg_gen_shl_tl(t0
, t0
, t1
);
2167 t2
= tcg_const_tl(-1);
2168 tcg_gen_shl_tl(t2
, t2
, t1
);
2169 gen_load_gpr(t1
, rt
);
2170 tcg_gen_andc_tl(t1
, t1
, t2
);
2172 tcg_gen_or_tl(t0
, t0
, t1
);
2174 gen_store_gpr(t0
, rt
);
2177 t1
= tcg_temp_new();
2178 /* Do a byte access to possibly trigger a page
2179 fault with the unaligned address. */
2180 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2181 tcg_gen_andi_tl(t1
, t0
, 7);
2182 #ifdef TARGET_WORDS_BIGENDIAN
2183 tcg_gen_xori_tl(t1
, t1
, 7);
2185 tcg_gen_shli_tl(t1
, t1
, 3);
2186 tcg_gen_andi_tl(t0
, t0
, ~7);
2187 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2188 tcg_gen_shr_tl(t0
, t0
, t1
);
2189 tcg_gen_xori_tl(t1
, t1
, 63);
2190 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2191 tcg_gen_shl_tl(t2
, t2
, t1
);
2192 gen_load_gpr(t1
, rt
);
2193 tcg_gen_and_tl(t1
, t1
, t2
);
2195 tcg_gen_or_tl(t0
, t0
, t1
);
2197 gen_store_gpr(t0
, rt
);
2200 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2201 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2203 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2204 gen_store_gpr(t0
, rt
);
2208 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2209 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2211 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2212 gen_store_gpr(t0
, rt
);
2215 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2216 ctx
->default_tcg_memop_mask
);
2217 gen_store_gpr(t0
, rt
);
2220 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2221 ctx
->default_tcg_memop_mask
);
2222 gen_store_gpr(t0
, rt
);
2225 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2226 ctx
->default_tcg_memop_mask
);
2227 gen_store_gpr(t0
, rt
);
2230 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2231 gen_store_gpr(t0
, rt
);
2234 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2235 gen_store_gpr(t0
, rt
);
2238 t1
= tcg_temp_new();
2239 /* Do a byte access to possibly trigger a page
2240 fault with the unaligned address. */
2241 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2242 tcg_gen_andi_tl(t1
, t0
, 3);
2243 #ifndef TARGET_WORDS_BIGENDIAN
2244 tcg_gen_xori_tl(t1
, t1
, 3);
2246 tcg_gen_shli_tl(t1
, t1
, 3);
2247 tcg_gen_andi_tl(t0
, t0
, ~3);
2248 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2249 tcg_gen_shl_tl(t0
, t0
, t1
);
2250 t2
= tcg_const_tl(-1);
2251 tcg_gen_shl_tl(t2
, t2
, t1
);
2252 gen_load_gpr(t1
, rt
);
2253 tcg_gen_andc_tl(t1
, t1
, t2
);
2255 tcg_gen_or_tl(t0
, t0
, t1
);
2257 tcg_gen_ext32s_tl(t0
, t0
);
2258 gen_store_gpr(t0
, rt
);
2261 t1
= tcg_temp_new();
2262 /* Do a byte access to possibly trigger a page
2263 fault with the unaligned address. */
2264 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2265 tcg_gen_andi_tl(t1
, t0
, 3);
2266 #ifdef TARGET_WORDS_BIGENDIAN
2267 tcg_gen_xori_tl(t1
, t1
, 3);
2269 tcg_gen_shli_tl(t1
, t1
, 3);
2270 tcg_gen_andi_tl(t0
, t0
, ~3);
2271 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2272 tcg_gen_shr_tl(t0
, t0
, t1
);
2273 tcg_gen_xori_tl(t1
, t1
, 31);
2274 t2
= tcg_const_tl(0xfffffffeull
);
2275 tcg_gen_shl_tl(t2
, t2
, t1
);
2276 gen_load_gpr(t1
, rt
);
2277 tcg_gen_and_tl(t1
, t1
, t2
);
2279 tcg_gen_or_tl(t0
, t0
, t1
);
2281 tcg_gen_ext32s_tl(t0
, t0
);
2282 gen_store_gpr(t0
, rt
);
2286 op_ld_ll(t0
, t0
, ctx
);
2287 gen_store_gpr(t0
, rt
);
2294 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2295 int base
, int16_t offset
)
2297 TCGv t0
= tcg_temp_new();
2298 TCGv t1
= tcg_temp_new();
2300 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2301 gen_load_gpr(t1
, rt
);
2303 #if defined(TARGET_MIPS64)
2305 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2306 ctx
->default_tcg_memop_mask
);
2309 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2312 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2316 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2317 ctx
->default_tcg_memop_mask
);
2320 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2321 ctx
->default_tcg_memop_mask
);
2324 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2327 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2330 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2338 /* Store conditional */
2339 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2340 int base
, int16_t offset
)
2344 #ifdef CONFIG_USER_ONLY
2345 t0
= tcg_temp_local_new();
2346 t1
= tcg_temp_local_new();
2348 t0
= tcg_temp_new();
2349 t1
= tcg_temp_new();
2351 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2352 gen_load_gpr(t1
, rt
);
2354 #if defined(TARGET_MIPS64)
2357 op_st_scd(t1
, t0
, rt
, ctx
);
2362 op_st_sc(t1
, t0
, rt
, ctx
);
2369 /* Load and store */
2370 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2371 int base
, int16_t offset
)
2373 TCGv t0
= tcg_temp_new();
2375 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2376 /* Don't do NOP if destination is zero: we must perform the actual
2381 TCGv_i32 fp0
= tcg_temp_new_i32();
2382 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2383 ctx
->default_tcg_memop_mask
);
2384 gen_store_fpr32(ctx
, fp0
, ft
);
2385 tcg_temp_free_i32(fp0
);
2390 TCGv_i32 fp0
= tcg_temp_new_i32();
2391 gen_load_fpr32(ctx
, fp0
, ft
);
2392 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2393 ctx
->default_tcg_memop_mask
);
2394 tcg_temp_free_i32(fp0
);
2399 TCGv_i64 fp0
= tcg_temp_new_i64();
2400 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2401 ctx
->default_tcg_memop_mask
);
2402 gen_store_fpr64(ctx
, fp0
, ft
);
2403 tcg_temp_free_i64(fp0
);
2408 TCGv_i64 fp0
= tcg_temp_new_i64();
2409 gen_load_fpr64(ctx
, fp0
, ft
);
2410 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2411 ctx
->default_tcg_memop_mask
);
2412 tcg_temp_free_i64(fp0
);
2416 MIPS_INVAL("flt_ldst");
2417 generate_exception_end(ctx
, EXCP_RI
);
2424 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2425 int rs
, int16_t imm
)
2427 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2428 check_cp1_enabled(ctx
);
2432 check_insn(ctx
, ISA_MIPS2
);
2435 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2438 generate_exception_err(ctx
, EXCP_CpU
, 1);
2442 /* Arithmetic with immediate operand */
2443 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2444 int rt
, int rs
, int16_t imm
)
2446 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2448 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2449 /* If no destination, treat it as a NOP.
2450 For addi, we must generate the overflow exception when needed. */
2456 TCGv t0
= tcg_temp_local_new();
2457 TCGv t1
= tcg_temp_new();
2458 TCGv t2
= tcg_temp_new();
2459 TCGLabel
*l1
= gen_new_label();
2461 gen_load_gpr(t1
, rs
);
2462 tcg_gen_addi_tl(t0
, t1
, uimm
);
2463 tcg_gen_ext32s_tl(t0
, t0
);
2465 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2466 tcg_gen_xori_tl(t2
, t0
, uimm
);
2467 tcg_gen_and_tl(t1
, t1
, t2
);
2469 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2471 /* operands of same sign, result different sign */
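            /* The XOR/AND sequence above sets the sign bit of t1 exactly when
               rs and the immediate have the same sign but the sum's sign
               differs, i.e. on signed overflow; the brcondi skips the
               EXCP_OVERFLOW path whenever t1 >= 0. */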
2472 generate_exception(ctx
, EXCP_OVERFLOW
);
2474 tcg_gen_ext32s_tl(t0
, t0
);
2475 gen_store_gpr(t0
, rt
);
2481 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2482 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2484 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2487 #if defined(TARGET_MIPS64)
2490 TCGv t0
= tcg_temp_local_new();
2491 TCGv t1
= tcg_temp_new();
2492 TCGv t2
= tcg_temp_new();
2493 TCGLabel
*l1
= gen_new_label();
2495 gen_load_gpr(t1
, rs
);
2496 tcg_gen_addi_tl(t0
, t1
, uimm
);
2498 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2499 tcg_gen_xori_tl(t2
, t0
, uimm
);
2500 tcg_gen_and_tl(t1
, t1
, t2
);
2502 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2504 /* operands of same sign, result different sign */
2505 generate_exception(ctx
, EXCP_OVERFLOW
);
2507 gen_store_gpr(t0
, rt
);
2513 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2515 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2522 /* Logic with immediate operand */
2523 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2524 int rt
, int rs
, int16_t imm
)
2529 /* If no destination, treat it as a NOP. */
2532 uimm
= (uint16_t)imm
;
2535 if (likely(rs
!= 0))
2536 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2538 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2542 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2544 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2547 if (likely(rs
!= 0))
2548 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2550 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2553 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2555 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2556 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2558 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2567 /* Set on less than with immediate operand */
2568 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2569 int rt
, int rs
, int16_t imm
)
2571 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2575 /* If no destination, treat it as a NOP. */
2578 t0
= tcg_temp_new();
2579 gen_load_gpr(t0
, rs
);
2582 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2585 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2591 /* Shifts with immediate operand */
2592 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2593 int rt
, int rs
, int16_t imm
)
2595 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2599 /* If no destination, treat it as a NOP. */
2603 t0
= tcg_temp_new();
2604 gen_load_gpr(t0
, rs
);
2607 tcg_gen_shli_tl(t0
, t0
, uimm
);
2608 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2611 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2615 tcg_gen_ext32u_tl(t0
, t0
);
2616 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2618 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2623 TCGv_i32 t1
= tcg_temp_new_i32();
2625 tcg_gen_trunc_tl_i32(t1
, t0
);
2626 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2627 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2628 tcg_temp_free_i32(t1
);
2630 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2633 #if defined(TARGET_MIPS64)
2635 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2638 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2641 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2645 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2647 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2651 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2654 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2657 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2660 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2668 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2669 int rd
, int rs
, int rt
)
2671 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2672 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2673 /* If no destination, treat it as a NOP.
2674 For add & sub, we must generate the overflow exception when needed. */
2681 TCGv t0
= tcg_temp_local_new();
2682 TCGv t1
= tcg_temp_new();
2683 TCGv t2
= tcg_temp_new();
2684 TCGLabel
*l1
= gen_new_label();
2686 gen_load_gpr(t1
, rs
);
2687 gen_load_gpr(t2
, rt
);
2688 tcg_gen_add_tl(t0
, t1
, t2
);
2689 tcg_gen_ext32s_tl(t0
, t0
);
2690 tcg_gen_xor_tl(t1
, t1
, t2
);
2691 tcg_gen_xor_tl(t2
, t0
, t2
);
2692 tcg_gen_andc_tl(t1
, t2
, t1
);
2694 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2696 /* operands of same sign, result different sign */
2697 generate_exception(ctx
, EXCP_OVERFLOW
);
2699 gen_store_gpr(t0
, rd
);
2704 if (rs
!= 0 && rt
!= 0) {
2705 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2706 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2707 } else if (rs
== 0 && rt
!= 0) {
2708 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2709 } else if (rs
!= 0 && rt
== 0) {
2710 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2712 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2717 TCGv t0
= tcg_temp_local_new();
2718 TCGv t1
= tcg_temp_new();
2719 TCGv t2
= tcg_temp_new();
2720 TCGLabel
*l1
= gen_new_label();
2722 gen_load_gpr(t1
, rs
);
2723 gen_load_gpr(t2
, rt
);
2724 tcg_gen_sub_tl(t0
, t1
, t2
);
2725 tcg_gen_ext32s_tl(t0
, t0
);
2726 tcg_gen_xor_tl(t2
, t1
, t2
);
2727 tcg_gen_xor_tl(t1
, t0
, t1
);
2728 tcg_gen_and_tl(t1
, t1
, t2
);
2730 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2732 /* operands of different sign, first operand and result different sign */
2733 generate_exception(ctx
, EXCP_OVERFLOW
);
2735 gen_store_gpr(t0
, rd
);
2740 if (rs
!= 0 && rt
!= 0) {
2741 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2742 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2743 } else if (rs
== 0 && rt
!= 0) {
2744 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2745 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2746 } else if (rs
!= 0 && rt
== 0) {
2747 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2749 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2752 #if defined(TARGET_MIPS64)
2755 TCGv t0
= tcg_temp_local_new();
2756 TCGv t1
= tcg_temp_new();
2757 TCGv t2
= tcg_temp_new();
2758 TCGLabel
*l1
= gen_new_label();
2760 gen_load_gpr(t1
, rs
);
2761 gen_load_gpr(t2
, rt
);
2762 tcg_gen_add_tl(t0
, t1
, t2
);
2763 tcg_gen_xor_tl(t1
, t1
, t2
);
2764 tcg_gen_xor_tl(t2
, t0
, t2
);
2765 tcg_gen_andc_tl(t1
, t2
, t1
);
2767 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2769 /* operands of same sign, result different sign */
2770 generate_exception(ctx
, EXCP_OVERFLOW
);
2772 gen_store_gpr(t0
, rd
);
2777 if (rs
!= 0 && rt
!= 0) {
2778 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2779 } else if (rs
== 0 && rt
!= 0) {
2780 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2781 } else if (rs
!= 0 && rt
== 0) {
2782 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2784 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2789 TCGv t0
= tcg_temp_local_new();
2790 TCGv t1
= tcg_temp_new();
2791 TCGv t2
= tcg_temp_new();
2792 TCGLabel
*l1
= gen_new_label();
2794 gen_load_gpr(t1
, rs
);
2795 gen_load_gpr(t2
, rt
);
2796 tcg_gen_sub_tl(t0
, t1
, t2
);
2797 tcg_gen_xor_tl(t2
, t1
, t2
);
2798 tcg_gen_xor_tl(t1
, t0
, t1
);
2799 tcg_gen_and_tl(t1
, t1
, t2
);
2801 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2803 /* operands of different sign, first operand and result different sign */
2804 generate_exception(ctx
, EXCP_OVERFLOW
);
2806 gen_store_gpr(t0
, rd
);
2811 if (rs
!= 0 && rt
!= 0) {
2812 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2813 } else if (rs
== 0 && rt
!= 0) {
2814 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2815 } else if (rs
!= 0 && rt
== 0) {
2816 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2818 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2823 if (likely(rs
!= 0 && rt
!= 0)) {
2824 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2825 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2827 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2833 /* Conditional move */
2834 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2835 int rd
, int rs
, int rt
)
2840 /* If no destination, treat it as a NOP. */
2844 t0
= tcg_temp_new();
2845 gen_load_gpr(t0
, rt
);
2846 t1
= tcg_const_tl(0);
2847 t2
= tcg_temp_new();
2848 gen_load_gpr(t2
, rs
);
2851 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2854 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2857 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2860 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2869 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2870 int rd
, int rs
, int rt
)
2873 /* If no destination, treat it as a NOP. */
2879 if (likely(rs
!= 0 && rt
!= 0)) {
2880 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2882 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2886 if (rs
!= 0 && rt
!= 0) {
2887 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2888 } else if (rs
== 0 && rt
!= 0) {
2889 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2890 } else if (rs
!= 0 && rt
== 0) {
2891 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2893 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2897 if (likely(rs
!= 0 && rt
!= 0)) {
2898 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2899 } else if (rs
== 0 && rt
!= 0) {
2900 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2901 } else if (rs
!= 0 && rt
== 0) {
2902 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2904 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2908 if (likely(rs
!= 0 && rt
!= 0)) {
2909 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2910 } else if (rs
== 0 && rt
!= 0) {
2911 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2912 } else if (rs
!= 0 && rt
== 0) {
2913 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2915 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2921 /* Set on lower than */
2922 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2923 int rd
, int rs
, int rt
)
2928 /* If no destination, treat it as a NOP. */
2932 t0
= tcg_temp_new();
2933 t1
= tcg_temp_new();
2934 gen_load_gpr(t0
, rs
);
2935 gen_load_gpr(t1
, rt
);
2938 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2941 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
static void gen_shift(DisasContext *ctx, uint32_t opc,
                      int rd, int rs, int rt)
{
    TCGv t0, t1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }
2960 t0
= tcg_temp_new();
2961 t1
= tcg_temp_new();
2962 gen_load_gpr(t0
, rs
);
2963 gen_load_gpr(t1
, rt
);
2966 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2967 tcg_gen_shl_tl(t0
, t1
, t0
);
2968 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2971 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2972 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2975 tcg_gen_ext32u_tl(t1
, t1
);
2976 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2977 tcg_gen_shr_tl(t0
, t1
, t0
);
2978 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2982 TCGv_i32 t2
= tcg_temp_new_i32();
2983 TCGv_i32 t3
= tcg_temp_new_i32();
2985 tcg_gen_trunc_tl_i32(t2
, t0
);
2986 tcg_gen_trunc_tl_i32(t3
, t1
);
2987 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2988 tcg_gen_rotr_i32(t2
, t3
, t2
);
2989 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2990 tcg_temp_free_i32(t2
);
2991 tcg_temp_free_i32(t3
);
2994 #if defined(TARGET_MIPS64)
2996 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2997 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
3000 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3001 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3004 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3005 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3008 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3009 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3017 /* Arithmetic on HI/LO registers */
3018 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3020 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3031 #if defined(TARGET_MIPS64)
3033 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3037 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3041 #if defined(TARGET_MIPS64)
3043 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3047 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3052 #if defined(TARGET_MIPS64)
3054 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3058 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3061 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3066 #if defined(TARGET_MIPS64)
3068 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3072 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3075 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3081 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3084 TCGv t0
= tcg_const_tl(addr
);
3085 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3086 gen_store_gpr(t0
, reg
);
3090 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3096 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3099 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3100 addr
= addr_add(ctx
, pc
, offset
);
3101 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3105 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3106 addr
= addr_add(ctx
, pc
, offset
);
3107 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3109 #if defined(TARGET_MIPS64)
3112 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3113 addr
= addr_add(ctx
, pc
, offset
);
3114 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3118 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3121 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3122 addr
= addr_add(ctx
, pc
, offset
);
3123 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3128 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3129 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3130 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3133 #if defined(TARGET_MIPS64)
3134 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3135 case R6_OPC_LDPC
+ (1 << 16):
3136 case R6_OPC_LDPC
+ (2 << 16):
3137 case R6_OPC_LDPC
+ (3 << 16):
3139 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3140 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3141 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3145 MIPS_INVAL("OPC_PCREL");
3146 generate_exception_end(ctx
, EXCP_RI
);
3153 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3162 t0
= tcg_temp_new();
3163 t1
= tcg_temp_new();
3165 gen_load_gpr(t0
, rs
);
3166 gen_load_gpr(t1
, rt
);
3171 TCGv t2
= tcg_temp_new();
3172 TCGv t3
= tcg_temp_new();
3173 tcg_gen_ext32s_tl(t0
, t0
);
3174 tcg_gen_ext32s_tl(t1
, t1
);
3175 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3176 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3177 tcg_gen_and_tl(t2
, t2
, t3
);
3178 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3179 tcg_gen_or_tl(t2
, t2
, t3
);
3180 tcg_gen_movi_tl(t3
, 0);
3181 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3182 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3183 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
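        /* The setcond/movcond sequence above replaces the divisor with 1
           whenever it is zero or the INT_MIN / -1 pair is seen, so the TCG
           division below can never trap; in those cases the result of the
           guarded division is simply used as-is. */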
3190 TCGv t2
= tcg_temp_new();
3191 TCGv t3
= tcg_temp_new();
3192 tcg_gen_ext32s_tl(t0
, t0
);
3193 tcg_gen_ext32s_tl(t1
, t1
);
3194 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3195 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3196 tcg_gen_and_tl(t2
, t2
, t3
);
3197 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3198 tcg_gen_or_tl(t2
, t2
, t3
);
3199 tcg_gen_movi_tl(t3
, 0);
3200 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3201 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3202 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3209 TCGv t2
= tcg_const_tl(0);
3210 TCGv t3
= tcg_const_tl(1);
3211 tcg_gen_ext32u_tl(t0
, t0
);
3212 tcg_gen_ext32u_tl(t1
, t1
);
3213 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3214 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3215 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3222 TCGv t2
= tcg_const_tl(0);
3223 TCGv t3
= tcg_const_tl(1);
3224 tcg_gen_ext32u_tl(t0
, t0
);
3225 tcg_gen_ext32u_tl(t1
, t1
);
3226 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3227 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3228 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3235 TCGv_i32 t2
= tcg_temp_new_i32();
3236 TCGv_i32 t3
= tcg_temp_new_i32();
3237 tcg_gen_trunc_tl_i32(t2
, t0
);
3238 tcg_gen_trunc_tl_i32(t3
, t1
);
3239 tcg_gen_mul_i32(t2
, t2
, t3
);
3240 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3241 tcg_temp_free_i32(t2
);
3242 tcg_temp_free_i32(t3
);
3247 TCGv_i32 t2
= tcg_temp_new_i32();
3248 TCGv_i32 t3
= tcg_temp_new_i32();
3249 tcg_gen_trunc_tl_i32(t2
, t0
);
3250 tcg_gen_trunc_tl_i32(t3
, t1
);
3251 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3252 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3253 tcg_temp_free_i32(t2
);
3254 tcg_temp_free_i32(t3
);
3259 TCGv_i32 t2
= tcg_temp_new_i32();
3260 TCGv_i32 t3
= tcg_temp_new_i32();
3261 tcg_gen_trunc_tl_i32(t2
, t0
);
3262 tcg_gen_trunc_tl_i32(t3
, t1
);
3263 tcg_gen_mul_i32(t2
, t2
, t3
);
3264 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3265 tcg_temp_free_i32(t2
);
3266 tcg_temp_free_i32(t3
);
3271 TCGv_i32 t2
= tcg_temp_new_i32();
3272 TCGv_i32 t3
= tcg_temp_new_i32();
3273 tcg_gen_trunc_tl_i32(t2
, t0
);
3274 tcg_gen_trunc_tl_i32(t3
, t1
);
3275 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3276 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3277 tcg_temp_free_i32(t2
);
3278 tcg_temp_free_i32(t3
);
3281 #if defined(TARGET_MIPS64)
3284 TCGv t2
= tcg_temp_new();
3285 TCGv t3
= tcg_temp_new();
3286 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3287 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3288 tcg_gen_and_tl(t2
, t2
, t3
);
3289 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3290 tcg_gen_or_tl(t2
, t2
, t3
);
3291 tcg_gen_movi_tl(t3
, 0);
3292 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3293 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3300 TCGv t2
= tcg_temp_new();
3301 TCGv t3
= tcg_temp_new();
3302 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3303 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3304 tcg_gen_and_tl(t2
, t2
, t3
);
3305 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3306 tcg_gen_or_tl(t2
, t2
, t3
);
3307 tcg_gen_movi_tl(t3
, 0);
3308 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3309 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3316 TCGv t2
= tcg_const_tl(0);
3317 TCGv t3
= tcg_const_tl(1);
3318 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3319 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3326 TCGv t2
= tcg_const_tl(0);
3327 TCGv t3
= tcg_const_tl(1);
3328 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3329 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3335 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3339 TCGv t2
= tcg_temp_new();
3340 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3345 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3349 TCGv t2
= tcg_temp_new();
3350 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3356 MIPS_INVAL("r6 mul/div");
3357 generate_exception_end(ctx
, EXCP_RI
);
3365 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3366 int acc
, int rs
, int rt
)
3370 t0
= tcg_temp_new();
3371 t1
= tcg_temp_new();
3373 gen_load_gpr(t0
, rs
);
3374 gen_load_gpr(t1
, rt
);
3383 TCGv t2
= tcg_temp_new();
3384 TCGv t3
= tcg_temp_new();
3385 tcg_gen_ext32s_tl(t0
, t0
);
3386 tcg_gen_ext32s_tl(t1
, t1
);
3387 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3388 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3389 tcg_gen_and_tl(t2
, t2
, t3
);
3390 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3391 tcg_gen_or_tl(t2
, t2
, t3
);
3392 tcg_gen_movi_tl(t3
, 0);
3393 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3394 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3395 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3396 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3397 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3404 TCGv t2
= tcg_const_tl(0);
3405 TCGv t3
= tcg_const_tl(1);
3406 tcg_gen_ext32u_tl(t0
, t0
);
3407 tcg_gen_ext32u_tl(t1
, t1
);
3408 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3409 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3410 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3411 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3412 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3419 TCGv_i32 t2
= tcg_temp_new_i32();
3420 TCGv_i32 t3
= tcg_temp_new_i32();
3421 tcg_gen_trunc_tl_i32(t2
, t0
);
3422 tcg_gen_trunc_tl_i32(t3
, t1
);
3423 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3424 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3425 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3426 tcg_temp_free_i32(t2
);
3427 tcg_temp_free_i32(t3
);
3432 TCGv_i32 t2
= tcg_temp_new_i32();
3433 TCGv_i32 t3
= tcg_temp_new_i32();
3434 tcg_gen_trunc_tl_i32(t2
, t0
);
3435 tcg_gen_trunc_tl_i32(t3
, t1
);
3436 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3437 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3438 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3439 tcg_temp_free_i32(t2
);
3440 tcg_temp_free_i32(t3
);
3443 #if defined(TARGET_MIPS64)
3446 TCGv t2
= tcg_temp_new();
3447 TCGv t3
= tcg_temp_new();
3448 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3449 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3450 tcg_gen_and_tl(t2
, t2
, t3
);
3451 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3452 tcg_gen_or_tl(t2
, t2
, t3
);
3453 tcg_gen_movi_tl(t3
, 0);
3454 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3455 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3456 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3463 TCGv t2
= tcg_const_tl(0);
3464 TCGv t3
= tcg_const_tl(1);
3465 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3466 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3467 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3473 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3476 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3481 TCGv_i64 t2
= tcg_temp_new_i64();
3482 TCGv_i64 t3
= tcg_temp_new_i64();
3484 tcg_gen_ext_tl_i64(t2
, t0
);
3485 tcg_gen_ext_tl_i64(t3
, t1
);
3486 tcg_gen_mul_i64(t2
, t2
, t3
);
3487 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3488 tcg_gen_add_i64(t2
, t2
, t3
);
3489 tcg_temp_free_i64(t3
);
3490 gen_move_low32(cpu_LO
[acc
], t2
);
3491 gen_move_high32(cpu_HI
[acc
], t2
);
3492 tcg_temp_free_i64(t2
);
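            /* This multiply-accumulate path: the 32-bit operands are
               sign-extended to 64 bits, multiplied, added to the current
               HI:LO accumulator (reassembled with tcg_gen_concat_tl_i64),
               and the sum is written back by splitting it into LO and HI. */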
3497 TCGv_i64 t2
= tcg_temp_new_i64();
3498 TCGv_i64 t3
= tcg_temp_new_i64();
3500 tcg_gen_ext32u_tl(t0
, t0
);
3501 tcg_gen_ext32u_tl(t1
, t1
);
3502 tcg_gen_extu_tl_i64(t2
, t0
);
3503 tcg_gen_extu_tl_i64(t3
, t1
);
3504 tcg_gen_mul_i64(t2
, t2
, t3
);
3505 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3506 tcg_gen_add_i64(t2
, t2
, t3
);
3507 tcg_temp_free_i64(t3
);
3508 gen_move_low32(cpu_LO
[acc
], t2
);
3509 gen_move_high32(cpu_HI
[acc
], t2
);
3510 tcg_temp_free_i64(t2
);
3515 TCGv_i64 t2
= tcg_temp_new_i64();
3516 TCGv_i64 t3
= tcg_temp_new_i64();
3518 tcg_gen_ext_tl_i64(t2
, t0
);
3519 tcg_gen_ext_tl_i64(t3
, t1
);
3520 tcg_gen_mul_i64(t2
, t2
, t3
);
3521 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3522 tcg_gen_sub_i64(t2
, t3
, t2
);
3523 tcg_temp_free_i64(t3
);
3524 gen_move_low32(cpu_LO
[acc
], t2
);
3525 gen_move_high32(cpu_HI
[acc
], t2
);
3526 tcg_temp_free_i64(t2
);
3531 TCGv_i64 t2
= tcg_temp_new_i64();
3532 TCGv_i64 t3
= tcg_temp_new_i64();
3534 tcg_gen_ext32u_tl(t0
, t0
);
3535 tcg_gen_ext32u_tl(t1
, t1
);
3536 tcg_gen_extu_tl_i64(t2
, t0
);
3537 tcg_gen_extu_tl_i64(t3
, t1
);
3538 tcg_gen_mul_i64(t2
, t2
, t3
);
3539 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3540 tcg_gen_sub_i64(t2
, t3
, t2
);
3541 tcg_temp_free_i64(t3
);
3542 gen_move_low32(cpu_LO
[acc
], t2
);
3543 gen_move_high32(cpu_HI
[acc
], t2
);
3544 tcg_temp_free_i64(t2
);
3548 MIPS_INVAL("mul/div");
3549 generate_exception_end(ctx
, EXCP_RI
);
3557 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3558 int rd
, int rs
, int rt
)
3560 TCGv t0
= tcg_temp_new();
3561 TCGv t1
= tcg_temp_new();
3563 gen_load_gpr(t0
, rs
);
3564 gen_load_gpr(t1
, rt
);
3567 case OPC_VR54XX_MULS
:
3568 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3570 case OPC_VR54XX_MULSU
:
3571 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3573 case OPC_VR54XX_MACC
:
3574 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3576 case OPC_VR54XX_MACCU
:
3577 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3579 case OPC_VR54XX_MSAC
:
3580 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3582 case OPC_VR54XX_MSACU
:
3583 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3585 case OPC_VR54XX_MULHI
:
3586 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3588 case OPC_VR54XX_MULHIU
:
3589 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3591 case OPC_VR54XX_MULSHI
:
3592 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3594 case OPC_VR54XX_MULSHIU
:
3595 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3597 case OPC_VR54XX_MACCHI
:
3598 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3600 case OPC_VR54XX_MACCHIU
:
3601 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3603 case OPC_VR54XX_MSACHI
:
3604 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3606 case OPC_VR54XX_MSACHIU
:
3607 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3610 MIPS_INVAL("mul vr54xx");
3611 generate_exception_end(ctx
, EXCP_RI
);
3614 gen_store_gpr(t0
, rd
);
3621 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3631 gen_load_gpr(t0
, rs
);
3636 #if defined(TARGET_MIPS64)
3640 tcg_gen_not_tl(t0
, t0
);
3649 tcg_gen_ext32u_tl(t0
, t0
);
3650 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3651 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3653 #if defined(TARGET_MIPS64)
3658 tcg_gen_clzi_i64(t0
, t0
, 64);
3664 /* Godson integer instructions */
3665 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3666 int rd
, int rs
, int rt
)
3678 case OPC_MULTU_G_2E
:
3679 case OPC_MULTU_G_2F
:
3680 #if defined(TARGET_MIPS64)
3681 case OPC_DMULT_G_2E
:
3682 case OPC_DMULT_G_2F
:
3683 case OPC_DMULTU_G_2E
:
3684 case OPC_DMULTU_G_2F
:
3686 t0
= tcg_temp_new();
3687 t1
= tcg_temp_new();
3690 t0
= tcg_temp_local_new();
3691 t1
= tcg_temp_local_new();
3695 gen_load_gpr(t0
, rs
);
3696 gen_load_gpr(t1
, rt
);
3701 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3702 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3704 case OPC_MULTU_G_2E
:
3705 case OPC_MULTU_G_2F
:
3706 tcg_gen_ext32u_tl(t0
, t0
);
3707 tcg_gen_ext32u_tl(t1
, t1
);
3708 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3709 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3714 TCGLabel
*l1
= gen_new_label();
3715 TCGLabel
*l2
= gen_new_label();
3716 TCGLabel
*l3
= gen_new_label();
3717 tcg_gen_ext32s_tl(t0
, t0
);
3718 tcg_gen_ext32s_tl(t1
, t1
);
3719 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3720 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3723 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3724 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3725 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3728 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3729 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3736 TCGLabel
*l1
= gen_new_label();
3737 TCGLabel
*l2
= gen_new_label();
3738 tcg_gen_ext32u_tl(t0
, t0
);
3739 tcg_gen_ext32u_tl(t1
, t1
);
3740 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3741 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3744 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3745 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3752 TCGLabel
*l1
= gen_new_label();
3753 TCGLabel
*l2
= gen_new_label();
3754 TCGLabel
*l3
= gen_new_label();
3755 tcg_gen_ext32u_tl(t0
, t0
);
3756 tcg_gen_ext32u_tl(t1
, t1
);
3757 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3758 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3759 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3761 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3764 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3765 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3772 TCGLabel
*l1
= gen_new_label();
3773 TCGLabel
*l2
= gen_new_label();
3774 tcg_gen_ext32u_tl(t0
, t0
);
3775 tcg_gen_ext32u_tl(t1
, t1
);
3776 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3777 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3780 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3781 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3785 #if defined(TARGET_MIPS64)
3786 case OPC_DMULT_G_2E
:
3787 case OPC_DMULT_G_2F
:
3788 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3790 case OPC_DMULTU_G_2E
:
3791 case OPC_DMULTU_G_2F
:
3792 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3797 TCGLabel
*l1
= gen_new_label();
3798 TCGLabel
*l2
= gen_new_label();
3799 TCGLabel
*l3
= gen_new_label();
3800 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3801 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3804 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3805 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3806 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3809 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3813 case OPC_DDIVU_G_2E
:
3814 case OPC_DDIVU_G_2F
:
3816 TCGLabel
*l1
= gen_new_label();
3817 TCGLabel
*l2
= gen_new_label();
3818 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3819 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3822 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3829 TCGLabel
*l1
= gen_new_label();
3830 TCGLabel
*l2
= gen_new_label();
3831 TCGLabel
*l3
= gen_new_label();
3832 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3833 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3834 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3836 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3839 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3843 case OPC_DMODU_G_2E
:
3844 case OPC_DMODU_G_2F
:
3846 TCGLabel
*l1
= gen_new_label();
3847 TCGLabel
*l2
= gen_new_label();
3848 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3849 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3852 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3863 /* Loongson multimedia instructions */
3864 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3866 uint32_t opc
, shift_max
;
3869 opc
= MASK_LMI(ctx
->opcode
);
3875 t0
= tcg_temp_local_new_i64();
3876 t1
= tcg_temp_local_new_i64();
3879 t0
= tcg_temp_new_i64();
3880 t1
= tcg_temp_new_i64();
3884 check_cp1_enabled(ctx
);
3885 gen_load_fpr64(ctx
, t0
, rs
);
3886 gen_load_fpr64(ctx
, t1
, rt
);
3888 #define LMI_HELPER(UP, LO) \
3889 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3890 #define LMI_HELPER_1(UP, LO) \
3891 case OPC_##UP: gen_helper_##LO(t0, t0); break
3892 #define LMI_DIRECT(UP, LO, OP) \
3893 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);
    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);
    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);
    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);
    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);
    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);
    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);
    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);
    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);
    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(OR_CP2, or, or);
        tcg_gen_andc_i64(t0, t1, t0);
        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);
        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        /* Make sure shift count isn't TCG undefined behaviour. */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);
        tcg_gen_shl_i64(t0, t0, t1);
        /* Since SRA is UndefinedResult without sign-extended inputs,
           we can treat SRA and DSRA the same. */
        tcg_gen_sar_i64(t0, t0, t1);
        /* We want to shift in zeros for SRL; zero-extend first. */
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_shr_i64(t0, t0, t1);
        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);
        /* Shifts larger than MAX produce zero. */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_add_i64(t0, t1, t2);
            if (opc == OPC_ADD_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_andc_i64(t1, t2, t1);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGLabel *lab = gen_new_label();

            tcg_gen_mov_i64(t2, t0);
            tcg_gen_sub_i64(t0, t1, t2);
            if (opc == OPC_SUB_CP2) {
                tcg_gen_ext32s_i64(t0, t0);
            tcg_gen_xor_i64(t1, t1, t2);
            tcg_gen_xor_i64(t2, t2, t0);
            tcg_gen_and_i64(t1, t1, t2);
            tcg_temp_free_i64(t2);
            tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
            generate_exception(ctx, EXCP_OVERFLOW);
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);
        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field? */
        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_fpr64(ctx, t0, rd);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
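
/* Trap instructions (TGE, TGEU, TLT, TLTU, TEQ, TNE and their immediate
   forms): compare the operands and raise EXCP_TRAP when the condition
   holds. */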
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
        /* Compare two registers */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */
            generate_exception_end(ctx, EXCP_TRAP);
        case OPC_TLT:   /* rs < rs           */
        case OPC_TLTI:  /* r0 < 0            */
        case OPC_TLTU:  /* rs < rs unsigned  */
        case OPC_TLTIU: /* r0 < 0  unsigned  */
        case OPC_TNE:   /* rs != rs          */
        case OPC_TNEI:  /* r0 != 0           */
            /* Never trap: treat as NOP. */
        TCGLabel *l1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
        generate_exception(ctx, EXCP_TRAP);
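
/* A branch target may be reached with a direct TB link only when we are not
   single-stepping and (outside of user mode) the destination lies in the
   same guest page as the current TB. */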
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
    if (unlikely(ctx->singlestep_enabled)) {
#ifndef CONFIG_USER_ONLY
    return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);

static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
        tcg_gen_lookup_and_goto_ptr(cpu_PC);
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int insn_bytes,
                                int rs, int rt, int32_t offset,
                                int delayslot_size)
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->pc);
        generate_exception_end(ctx, EXCP_RI);

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        btgt = ctx->pc + insn_bytes + offset;
        /* Compare to zero */
            gen_load_gpr(t0, rs);
        btgt = ctx->pc + insn_bytes + offset;
#if defined(TARGET_MIPS64)
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->pc + insn_bytes + offset;
        /* Jump to immediate */
        btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:   /* rx == rx        */
        case OPC_BEQL:  /* rx == rx likely */
        case OPC_BGEZ:  /* 0 >= 0          */
        case OPC_BGEZL: /* 0 >= 0 likely   */
        case OPC_BLEZ:  /* 0 <= 0          */
        case OPC_BLEZL: /* 0 <= 0 likely   */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL:  /* 0 >= 0        */
        case OPC_BGEZALL: /* 0 >= 0 likely */
            /* Always take and link */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:     /* rx != rx      */
        case OPC_BGTZ:    /* 0 > 0         */
        case OPC_BLTZ:    /* 0 < 0         */
        case OPC_BLTZAL:  /* 0 < 0         */
            /* Handle as an unconditional branch to get correct delay
               slot checking. */
            btgt = ctx->pc + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely    */
        case OPC_BLTZL:   /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BX;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
#if defined(TARGET_MIPS64)
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BC;
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BL;
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);

    ctx->btarget = btgt;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;

        int post_delay = insn_bytes + delayslot_size;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);
        tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);

    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
        if (lsb + msb > 31) {
        tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
            /* The two checks together imply that lsb == 0,
               so this is a simple sign-extension.  */
            tcg_gen_ext32s_tl(t0, t1);
#if defined(TARGET_MIPS64)
        if (lsb + msb > 63) {
        tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_gpr(t0, rt);
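
/* WSBH, SEB and SEH (plus DSBH and DSHD on MIPS64): byte/halfword swapping
   and sign-extension of the low byte or halfword of rt. */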
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
        /* If no destination, treat it as a NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 16);
        tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
        tcg_gen_shli_tl(t0, t0, 16);
        tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_shri_tl(t1, t0, 32);
        tcg_gen_shli_tl(t0, t0, 32);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        MIPS_INVAL("bsfhl");
        generate_exception_end(ctx, EXCP_RI);
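
/* LSA/DLSA: scaled add, rd = (rs << (imm2 + 1)) + rt; the 32-bit LSA form
   additionally sign-extends the result. */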
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
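
/* ALIGN/DALIGN: concatenate rs:rt and extract a register-sized field
   starting at byte offset bp (bp == 0 degenerates to a plain move of rt). */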
static void gen_align(DisasContext *ctx, int opc, int rd, int rs, int rt,
                      int bp)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        tcg_gen_mov_tl(cpu_gpr[rd], t0);
    TCGv t1 = tcg_temp_new();
    gen_load_gpr(t1, rs);
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_concat_tl_i64(t2, t1, t0);
        tcg_gen_shri_i64(t2, t2, 8 * (4 - bp));
        gen_move_low32(cpu_gpr[rd], t2);
        tcg_temp_free_i64(t2);
#if defined(TARGET_MIPS64)
        tcg_gen_shli_tl(t0, t0, 8 * bp);
        tcg_gen_shri_tl(t1, t1, 8 * (8 - bp));
        tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
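
/* BITSWAP/DBITSWAP: reverse the bit order within each byte of rt. */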
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        gen_helper_bitswap(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dbitswap(cpu_gpr[rd], t0);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
    tcg_gen_shri_i64(t0, t0, 32);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                             ctx->CP0_LLAddr_shift);
            CP0_CHECK(ctx->mrp);
            gen_helper_mfhc0_maar(arg, cpu_env);
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("mfhc0", rn, reg, sel);
    qemu_log_mask(LOG_UNIMP, "mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    tcg_gen_movi_tl(arg, 0);
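
/* MTHC0 writes the upper half of a 64-bit CP0 register from a 32-bit GPR;
   it is only available when extended physical addressing (ELPA) is on. */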
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            /* LLAddr is read-only (the only exception is bit 0 if LLB is
               supported); the CP0_LLAddr_rw_bitmask does not seem to be
               relevant for modern MIPS cores supporting MTHC0, therefore
               treating MTHC0 to LLAddr as NOP. */
            CP0_CHECK(ctx->mrp);
            gen_helper_mthc0_maar(cpu_env, arg);
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    trace_mips_translate_c0("mthc0", rn, reg, sel);
    qemu_log_mask(LOG_UNIMP, "mthc0 %s (reg %d sel %d)\n", rn, reg, sel);
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
        tcg_gen_movi_tl(arg, ~0);
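
/* MFC0: the switch below dispatches on the CP0 register number and sel
   field, either loading directly from CPUMIPSState or calling a helper
   when reading has side effects. */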
static void gen_mfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

        check_insn(ctx, ISA_MIPS32);
4969 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4973 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4974 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4978 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4979 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4983 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4984 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4989 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
4993 goto cp0_unimplemented
;
4999 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5000 gen_helper_mfc0_random(arg
, cpu_env
);
5004 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5005 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5009 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5010 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5014 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5015 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5019 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5020 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5024 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5025 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5029 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5030 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5031 rn
= "VPEScheFBack";
5034 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5035 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5039 goto cp0_unimplemented
;
5046 TCGv_i64 tmp
= tcg_temp_new_i64();
5047 tcg_gen_ld_i64(tmp
, cpu_env
,
5048 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5049 #if defined(TARGET_MIPS64)
5051 /* Move RI/XI fields to bits 31:30 */
5052 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5053 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5056 gen_move_low32(arg
, tmp
);
5057 tcg_temp_free_i64(tmp
);
5062 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5063 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5067 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5068 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5072 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5073 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5077 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5078 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5082 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5083 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5087 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5088 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5092 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5093 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5097 goto cp0_unimplemented
;
5104 TCGv_i64 tmp
= tcg_temp_new_i64();
5105 tcg_gen_ld_i64(tmp
, cpu_env
,
5106 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5107 #if defined(TARGET_MIPS64)
5109 /* Move RI/XI fields to bits 31:30 */
5110 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5111 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5114 gen_move_low32(arg
, tmp
);
5115 tcg_temp_free_i64(tmp
);
5121 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5122 rn
= "GlobalNumber";
5125 goto cp0_unimplemented
;
5131 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5132 tcg_gen_ext32s_tl(arg
, arg
);
5136 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5137 rn
= "ContextConfig";
5138 goto cp0_unimplemented
;
5140 CP0_CHECK(ctx
->ulri
);
5141 tcg_gen_ld32s_tl(arg
, cpu_env
,
5142 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5146 goto cp0_unimplemented
;
5152 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5156 check_insn(ctx
, ISA_MIPS32R2
);
5157 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5161 goto cp0_unimplemented
;
5167 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5171 check_insn(ctx
, ISA_MIPS32R2
);
5172 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5176 check_insn(ctx
, ISA_MIPS32R2
);
5177 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5181 check_insn(ctx
, ISA_MIPS32R2
);
5182 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5186 check_insn(ctx
, ISA_MIPS32R2
);
5187 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5191 check_insn(ctx
, ISA_MIPS32R2
);
5192 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5196 goto cp0_unimplemented
;
5202 check_insn(ctx
, ISA_MIPS32R2
);
5203 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5207 goto cp0_unimplemented
;
5213 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5214 tcg_gen_ext32s_tl(arg
, arg
);
5219 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5224 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5228 goto cp0_unimplemented
;
5234 /* Mark as an IO operation because we read the time. */
5235 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5238 gen_helper_mfc0_count(arg
, cpu_env
);
5239 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5242 /* Break the TB to be able to take timer interrupts immediately
5243 after reading count. */
5244 ctx
->bstate
= BS_STOP
;
5247 /* 6,7 are implementation dependent */
5249 goto cp0_unimplemented
;
5255 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5256 tcg_gen_ext32s_tl(arg
, arg
);
5260 goto cp0_unimplemented
;
5266 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5269 /* 6,7 are implementation dependent */
5271 goto cp0_unimplemented
;
5277 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5281 check_insn(ctx
, ISA_MIPS32R2
);
5282 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5286 check_insn(ctx
, ISA_MIPS32R2
);
5287 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5291 check_insn(ctx
, ISA_MIPS32R2
);
5292 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5296 goto cp0_unimplemented
;
5302 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5306 goto cp0_unimplemented
;
5312 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5313 tcg_gen_ext32s_tl(arg
, arg
);
5317 goto cp0_unimplemented
;
5323 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5327 check_insn(ctx
, ISA_MIPS32R2
);
5328 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5332 check_insn(ctx
, ISA_MIPS32R2
);
5333 CP0_CHECK(ctx
->cmgcr
);
5334 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5335 tcg_gen_ext32s_tl(arg
, arg
);
5339 goto cp0_unimplemented
;
5345 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5349 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5353 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5357 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5361 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5365 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5368 /* 6,7 are implementation dependent */
5370 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5374 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5378 goto cp0_unimplemented
;
5384 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5388 CP0_CHECK(ctx
->mrp
);
5389 gen_helper_mfc0_maar(arg
, cpu_env
);
5393 CP0_CHECK(ctx
->mrp
);
5394 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5398 goto cp0_unimplemented
;
5404 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5408 goto cp0_unimplemented
;
5414 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5418 goto cp0_unimplemented
;
5424 #if defined(TARGET_MIPS64)
5425 check_insn(ctx
, ISA_MIPS3
);
5426 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5427 tcg_gen_ext32s_tl(arg
, arg
);
5432 goto cp0_unimplemented
;
5436 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5437 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5440 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5444 goto cp0_unimplemented
;
5448 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5449 rn
= "'Diagnostic"; /* implementation dependent */
5454 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5458 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5459 rn
= "TraceControl";
5460 goto cp0_unimplemented
;
5462 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5463 rn
= "TraceControl2";
5464 goto cp0_unimplemented
;
5466 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5467 rn
= "UserTraceData";
5468 goto cp0_unimplemented
;
5470 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5472 goto cp0_unimplemented
;
5474 goto cp0_unimplemented
;
5481 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5482 tcg_gen_ext32s_tl(arg
, arg
);
5486 goto cp0_unimplemented
;
5492 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5493 rn
= "Performance0";
5496 // gen_helper_mfc0_performance1(arg);
5497 rn
= "Performance1";
5498 goto cp0_unimplemented
;
5500 // gen_helper_mfc0_performance2(arg);
5501 rn
= "Performance2";
5502 goto cp0_unimplemented
;
5504 // gen_helper_mfc0_performance3(arg);
5505 rn
= "Performance3";
5506 goto cp0_unimplemented
;
5508 // gen_helper_mfc0_performance4(arg);
5509 rn
= "Performance4";
5510 goto cp0_unimplemented
;
5512 // gen_helper_mfc0_performance5(arg);
5513 rn
= "Performance5";
5514 goto cp0_unimplemented
;
5516 // gen_helper_mfc0_performance6(arg);
5517 rn
= "Performance6";
5518 goto cp0_unimplemented
;
5520 // gen_helper_mfc0_performance7(arg);
5521 rn
= "Performance7";
5522 goto cp0_unimplemented
;
5524 goto cp0_unimplemented
;
5530 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5534 goto cp0_unimplemented
;
5540 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5544 goto cp0_unimplemented
;
5554 TCGv_i64 tmp
= tcg_temp_new_i64();
5555 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5556 gen_move_low32(arg
, tmp
);
5557 tcg_temp_free_i64(tmp
);
5565 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5569 goto cp0_unimplemented
;
5578 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5585 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5589 goto cp0_unimplemented
;
5595 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5596 tcg_gen_ext32s_tl(arg
, arg
);
5600 goto cp0_unimplemented
;
5607 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5611 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5612 tcg_gen_ld_tl(arg
, cpu_env
,
5613 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5614 tcg_gen_ext32s_tl(arg
, arg
);
5618 goto cp0_unimplemented
;
5622 goto cp0_unimplemented
;
    trace_mips_translate_c0("mfc0", rn, reg, sel);
    qemu_log_mask(LOG_UNIMP, "mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
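
/* MTC0: CP0 writes are dispatched the same way; most go through helpers so
   that side effects (TLB, timer, interrupt or mode changes) take effect, and
   translation is stopped whenever hflags may have changed. */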
static void gen_mtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

        check_insn(ctx, ISA_MIPS32);
5639 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5647 gen_helper_mtc0_index(cpu_env
, arg
);
5651 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5652 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5656 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5661 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5671 goto cp0_unimplemented
;
5681 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5682 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5686 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5687 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5691 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5692 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5696 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5697 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5701 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5702 tcg_gen_st_tl(arg
, cpu_env
,
5703 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5707 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5708 tcg_gen_st_tl(arg
, cpu_env
,
5709 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5710 rn
= "VPEScheFBack";
5713 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5714 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5718 goto cp0_unimplemented
;
5724 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5728 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5729 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5733 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5734 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5738 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5739 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5743 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5744 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5748 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5749 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5753 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5754 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5758 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5759 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5763 goto cp0_unimplemented
;
5769 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5775 rn
= "GlobalNumber";
5778 goto cp0_unimplemented
;
5784 gen_helper_mtc0_context(cpu_env
, arg
);
5788 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5789 rn
= "ContextConfig";
5790 goto cp0_unimplemented
;
5792 CP0_CHECK(ctx
->ulri
);
5793 tcg_gen_st_tl(arg
, cpu_env
,
5794 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5798 goto cp0_unimplemented
;
5804 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5808 check_insn(ctx
, ISA_MIPS32R2
);
5809 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5811 ctx
->bstate
= BS_STOP
;
5814 goto cp0_unimplemented
;
5820 gen_helper_mtc0_wired(cpu_env
, arg
);
5824 check_insn(ctx
, ISA_MIPS32R2
);
5825 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5829 check_insn(ctx
, ISA_MIPS32R2
);
5830 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5834 check_insn(ctx
, ISA_MIPS32R2
);
5835 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5839 check_insn(ctx
, ISA_MIPS32R2
);
5840 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5844 check_insn(ctx
, ISA_MIPS32R2
);
5845 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5849 goto cp0_unimplemented
;
5855 check_insn(ctx
, ISA_MIPS32R2
);
5856 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5857 ctx
->bstate
= BS_STOP
;
5861 goto cp0_unimplemented
;
5879 goto cp0_unimplemented
;
5885 gen_helper_mtc0_count(cpu_env
, arg
);
5888 /* 6,7 are implementation dependent */
5890 goto cp0_unimplemented
;
5896 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5900 goto cp0_unimplemented
;
5906 gen_helper_mtc0_compare(cpu_env
, arg
);
5909 /* 6,7 are implementation dependent */
5911 goto cp0_unimplemented
;
5917 save_cpu_state(ctx
, 1);
5918 gen_helper_mtc0_status(cpu_env
, arg
);
5919 /* BS_STOP isn't good enough here, hflags may have changed. */
5920 gen_save_pc(ctx
->pc
+ 4);
5921 ctx
->bstate
= BS_EXCP
;
5925 check_insn(ctx
, ISA_MIPS32R2
);
5926 gen_helper_mtc0_intctl(cpu_env
, arg
);
5927 /* Stop translation as we may have switched the execution mode */
5928 ctx
->bstate
= BS_STOP
;
5932 check_insn(ctx
, ISA_MIPS32R2
);
5933 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5934 /* Stop translation as we may have switched the execution mode */
5935 ctx
->bstate
= BS_STOP
;
5939 check_insn(ctx
, ISA_MIPS32R2
);
5940 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5941 /* Stop translation as we may have switched the execution mode */
5942 ctx
->bstate
= BS_STOP
;
5946 goto cp0_unimplemented
;
5952 save_cpu_state(ctx
, 1);
5953 gen_helper_mtc0_cause(cpu_env
, arg
);
5957 goto cp0_unimplemented
;
5963 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5967 goto cp0_unimplemented
;
5977 check_insn(ctx
, ISA_MIPS32R2
);
5978 gen_helper_mtc0_ebase(cpu_env
, arg
);
5982 goto cp0_unimplemented
;
5988 gen_helper_mtc0_config0(cpu_env
, arg
);
5990 /* Stop translation as we may have switched the execution mode */
5991 ctx
->bstate
= BS_STOP
;
5994 /* ignored, read only */
5998 gen_helper_mtc0_config2(cpu_env
, arg
);
6000 /* Stop translation as we may have switched the execution mode */
6001 ctx
->bstate
= BS_STOP
;
6004 gen_helper_mtc0_config3(cpu_env
, arg
);
6006 /* Stop translation as we may have switched the execution mode */
6007 ctx
->bstate
= BS_STOP
;
6010 gen_helper_mtc0_config4(cpu_env
, arg
);
6012 ctx
->bstate
= BS_STOP
;
6015 gen_helper_mtc0_config5(cpu_env
, arg
);
6017 /* Stop translation as we may have switched the execution mode */
6018 ctx
->bstate
= BS_STOP
;
6020 /* 6,7 are implementation dependent */
6030 rn
= "Invalid config selector";
6031 goto cp0_unimplemented
;
6037 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6041 CP0_CHECK(ctx
->mrp
);
6042 gen_helper_mtc0_maar(cpu_env
, arg
);
6046 CP0_CHECK(ctx
->mrp
);
6047 gen_helper_mtc0_maari(cpu_env
, arg
);
6051 goto cp0_unimplemented
;
6057 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6061 goto cp0_unimplemented
;
6067 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6071 goto cp0_unimplemented
;
6077 #if defined(TARGET_MIPS64)
6078 check_insn(ctx
, ISA_MIPS3
);
6079 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6084 goto cp0_unimplemented
;
6088 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6089 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6092 gen_helper_mtc0_framemask(cpu_env
, arg
);
6096 goto cp0_unimplemented
;
6101 rn
= "Diagnostic"; /* implementation dependent */
6106 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6107 /* BS_STOP isn't good enough here, hflags may have changed. */
6108 gen_save_pc(ctx
->pc
+ 4);
6109 ctx
->bstate
= BS_EXCP
;
6113 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6114 rn
= "TraceControl";
6115 /* Stop translation as we may have switched the execution mode */
6116 ctx
->bstate
= BS_STOP
;
6117 goto cp0_unimplemented
;
6119 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6120 rn
= "TraceControl2";
6121 /* Stop translation as we may have switched the execution mode */
6122 ctx
->bstate
= BS_STOP
;
6123 goto cp0_unimplemented
;
6125 /* Stop translation as we may have switched the execution mode */
6126 ctx
->bstate
= BS_STOP
;
6127 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6128 rn
= "UserTraceData";
6129 /* Stop translation as we may have switched the execution mode */
6130 ctx
->bstate
= BS_STOP
;
6131 goto cp0_unimplemented
;
6133 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6134 /* Stop translation as we may have switched the execution mode */
6135 ctx
->bstate
= BS_STOP
;
6137 goto cp0_unimplemented
;
6139 goto cp0_unimplemented
;
6146 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6150 goto cp0_unimplemented
;
6156 gen_helper_mtc0_performance0(cpu_env
, arg
);
6157 rn
= "Performance0";
6160 // gen_helper_mtc0_performance1(arg);
6161 rn
= "Performance1";
6162 goto cp0_unimplemented
;
6164 // gen_helper_mtc0_performance2(arg);
6165 rn
= "Performance2";
6166 goto cp0_unimplemented
;
6168 // gen_helper_mtc0_performance3(arg);
6169 rn
= "Performance3";
6170 goto cp0_unimplemented
;
6172 // gen_helper_mtc0_performance4(arg);
6173 rn
= "Performance4";
6174 goto cp0_unimplemented
;
6176 // gen_helper_mtc0_performance5(arg);
6177 rn
= "Performance5";
6178 goto cp0_unimplemented
;
6180 // gen_helper_mtc0_performance6(arg);
6181 rn
= "Performance6";
6182 goto cp0_unimplemented
;
6184 // gen_helper_mtc0_performance7(arg);
6185 rn
= "Performance7";
6186 goto cp0_unimplemented
;
6188 goto cp0_unimplemented
;
6194 gen_helper_mtc0_errctl(cpu_env
, arg
);
6195 ctx
->bstate
= BS_STOP
;
6199 goto cp0_unimplemented
;
6209 goto cp0_unimplemented
;
6218 gen_helper_mtc0_taglo(cpu_env
, arg
);
6225 gen_helper_mtc0_datalo(cpu_env
, arg
);
6229 goto cp0_unimplemented
;
6238 gen_helper_mtc0_taghi(cpu_env
, arg
);
6245 gen_helper_mtc0_datahi(cpu_env
, arg
);
6250 goto cp0_unimplemented
;
6256 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6260 goto cp0_unimplemented
;
6267 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6271 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6272 tcg_gen_st_tl(arg
, cpu_env
,
6273 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6277 goto cp0_unimplemented
;
6279 /* Stop translation as we may have switched the execution mode */
6280 ctx
->bstate
= BS_STOP
;
6283 goto cp0_unimplemented
;
    trace_mips_translate_c0("mtc0", rn, reg, sel);

    /* For simplicity assume that all writes can cause interrupts. */
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
        ctx->bstate = BS_STOP;
    qemu_log_mask(LOG_UNIMP, "mtc0 %s (reg %d sel %d)\n", rn, reg, sel);

#if defined(TARGET_MIPS64)
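
/* DMFC0/DMTC0: 64-bit counterparts of MFC0/MTC0 on MIPS64, moving the full
   register width instead of the sign-extended low word. */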
static void gen_dmfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

        check_insn(ctx, ISA_MIPS64);
6310 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6314 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6315 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6319 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6320 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6324 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6325 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6330 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6334 goto cp0_unimplemented
;
6340 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6341 gen_helper_mfc0_random(arg
, cpu_env
);
6345 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6346 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6350 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6351 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6355 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6356 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6360 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6361 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6365 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6366 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6370 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6371 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6372 rn
= "VPEScheFBack";
6375 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6376 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6380 goto cp0_unimplemented
;
6386 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6390 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6391 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6395 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6396 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6400 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6401 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6405 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6406 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6410 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6411 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6415 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6416 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6420 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6421 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6425 goto cp0_unimplemented
;
6431 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6436 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6437 rn
= "GlobalNumber";
6440 goto cp0_unimplemented
;
6446 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6450 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6451 rn
= "ContextConfig";
6452 goto cp0_unimplemented
;
6454 CP0_CHECK(ctx
->ulri
);
6455 tcg_gen_ld_tl(arg
, cpu_env
,
6456 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6460 goto cp0_unimplemented
;
6466 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6470 check_insn(ctx
, ISA_MIPS32R2
);
6471 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6475 goto cp0_unimplemented
;
6481 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6485 check_insn(ctx
, ISA_MIPS32R2
);
6486 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6490 check_insn(ctx
, ISA_MIPS32R2
);
6491 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6495 check_insn(ctx
, ISA_MIPS32R2
);
6496 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6500 check_insn(ctx
, ISA_MIPS32R2
);
6501 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6505 check_insn(ctx
, ISA_MIPS32R2
);
6506 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6510 goto cp0_unimplemented
;
6516 check_insn(ctx
, ISA_MIPS32R2
);
6517 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6521 goto cp0_unimplemented
;
6527 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6532 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6537 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6541 goto cp0_unimplemented
;
6547 /* Mark as an IO operation because we read the time. */
6548 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6551 gen_helper_mfc0_count(arg
, cpu_env
);
6552 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6555 /* Break the TB to be able to take timer interrupts immediately
6556 after reading count. */
6557 ctx
->bstate
= BS_STOP
;
6560 /* 6,7 are implementation dependent */
6562 goto cp0_unimplemented
;
6568 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6572 goto cp0_unimplemented
;
6578 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6581 /* 6,7 are implementation dependent */
6583 goto cp0_unimplemented
;
6589 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6593 check_insn(ctx
, ISA_MIPS32R2
);
6594 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6598 check_insn(ctx
, ISA_MIPS32R2
);
6599 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6603 check_insn(ctx
, ISA_MIPS32R2
);
6604 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6608 goto cp0_unimplemented
;
6614 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6618 goto cp0_unimplemented
;
6624 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6628 goto cp0_unimplemented
;
6634 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6638 check_insn(ctx
, ISA_MIPS32R2
);
6639 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6643 check_insn(ctx
, ISA_MIPS32R2
);
6644 CP0_CHECK(ctx
->cmgcr
);
6645 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6649 goto cp0_unimplemented
;
6655 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6659 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6663 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6667 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6671 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6675 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6678 /* 6,7 are implementation dependent */
6680 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6684 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6688 goto cp0_unimplemented
;
6694 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6698 CP0_CHECK(ctx
->mrp
);
6699 gen_helper_dmfc0_maar(arg
, cpu_env
);
6703 CP0_CHECK(ctx
->mrp
);
6704 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6708 goto cp0_unimplemented
;
6714 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6718 goto cp0_unimplemented
;
6724 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6728 goto cp0_unimplemented
;
6734 check_insn(ctx
, ISA_MIPS3
);
6735 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6739 goto cp0_unimplemented
;
6743 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6744 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6747 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6751 goto cp0_unimplemented
;
6755 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6756 rn
= "'Diagnostic"; /* implementation dependent */
6761 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6765 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6766 rn
= "TraceControl";
6767 goto cp0_unimplemented
;
6769 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6770 rn
= "TraceControl2";
6771 goto cp0_unimplemented
;
6773 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6774 rn
= "UserTraceData";
6775 goto cp0_unimplemented
;
6777 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6779 goto cp0_unimplemented
;
6781 goto cp0_unimplemented
;
6788 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6792 goto cp0_unimplemented
;
6798 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6799 rn
= "Performance0";
6802 // gen_helper_dmfc0_performance1(arg);
6803 rn
= "Performance1";
6804 goto cp0_unimplemented
;
6806 // gen_helper_dmfc0_performance2(arg);
6807 rn
= "Performance2";
6808 goto cp0_unimplemented
;
6810 // gen_helper_dmfc0_performance3(arg);
6811 rn
= "Performance3";
6812 goto cp0_unimplemented
;
6814 // gen_helper_dmfc0_performance4(arg);
6815 rn
= "Performance4";
6816 goto cp0_unimplemented
;
6818 // gen_helper_dmfc0_performance5(arg);
6819 rn
= "Performance5";
6820 goto cp0_unimplemented
;
6822 // gen_helper_dmfc0_performance6(arg);
6823 rn
= "Performance6";
6824 goto cp0_unimplemented
;
6826 // gen_helper_dmfc0_performance7(arg);
6827 rn
= "Performance7";
6828 goto cp0_unimplemented
;
6830 goto cp0_unimplemented
;
6836 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6840 goto cp0_unimplemented
;
6847 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6851 goto cp0_unimplemented
;
6860 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6867 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6871 goto cp0_unimplemented
;
6880 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6887 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6891 goto cp0_unimplemented
;
6897 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6901 goto cp0_unimplemented
;
6908 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6912 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6913 tcg_gen_ld_tl(arg
, cpu_env
,
6914 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6918 goto cp0_unimplemented
;
6922 goto cp0_unimplemented
;
    trace_mips_translate_c0("dmfc0", rn, reg, sel);
    qemu_log_mask(LOG_UNIMP, "dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
static void gen_dmtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

        check_insn(ctx, ISA_MIPS64);
6939 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6947 gen_helper_mtc0_index(cpu_env
, arg
);
6951 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6952 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6956 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6961 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6971 goto cp0_unimplemented
;
6981 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6982 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6986 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6987 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6991 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6992 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6996 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6997 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7001 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7002 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7006 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7007 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7008 rn
= "VPEScheFBack";
7011 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7012 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7016 goto cp0_unimplemented
;
7022 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7026 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7027 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7031 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7032 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7036 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7037 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7041 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7042 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7046 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7047 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7051 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7052 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7056 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7057 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7061 goto cp0_unimplemented
;
7067 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7073 rn
= "GlobalNumber";
7076 goto cp0_unimplemented
;
7082 gen_helper_mtc0_context(cpu_env
, arg
);
7086 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7087 rn
= "ContextConfig";
7088 goto cp0_unimplemented
;
7090 CP0_CHECK(ctx
->ulri
);
7091 tcg_gen_st_tl(arg
, cpu_env
,
7092 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7096 goto cp0_unimplemented
;
7102 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7106 check_insn(ctx
, ISA_MIPS32R2
);
7107 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7111 goto cp0_unimplemented
;
7117 gen_helper_mtc0_wired(cpu_env
, arg
);
7121 check_insn(ctx
, ISA_MIPS32R2
);
7122 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7126 check_insn(ctx
, ISA_MIPS32R2
);
7127 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7131 check_insn(ctx
, ISA_MIPS32R2
);
7132 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7136 check_insn(ctx
, ISA_MIPS32R2
);
7137 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7141 check_insn(ctx
, ISA_MIPS32R2
);
7142 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7146 goto cp0_unimplemented
;
7152 check_insn(ctx
, ISA_MIPS32R2
);
7153 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7154 ctx
->bstate
= BS_STOP
;
7158 goto cp0_unimplemented
;
7176 goto cp0_unimplemented
;
7182 gen_helper_mtc0_count(cpu_env
, arg
);
7185 /* 6,7 are implementation dependent */
7187 goto cp0_unimplemented
;
7189 /* Stop translation as we may have switched the execution mode */
7190 ctx
->bstate
= BS_STOP
;
7195 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7199 goto cp0_unimplemented
;
7205 gen_helper_mtc0_compare(cpu_env
, arg
);
7208 /* 6,7 are implementation dependent */
7210 goto cp0_unimplemented
;
7212 /* Stop translation as we may have switched the execution mode */
7213 ctx
->bstate
= BS_STOP
;
7218 save_cpu_state(ctx
, 1);
7219 gen_helper_mtc0_status(cpu_env
, arg
);
7220 /* BS_STOP isn't good enough here, hflags may have changed. */
7221 gen_save_pc(ctx
->pc
+ 4);
7222 ctx
->bstate
= BS_EXCP
;
7226 check_insn(ctx
, ISA_MIPS32R2
);
7227 gen_helper_mtc0_intctl(cpu_env
, arg
);
7228 /* Stop translation as we may have switched the execution mode */
7229 ctx
->bstate
= BS_STOP
;
7233 check_insn(ctx
, ISA_MIPS32R2
);
7234 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7235 /* Stop translation as we may have switched the execution mode */
7236 ctx
->bstate
= BS_STOP
;
7240 check_insn(ctx
, ISA_MIPS32R2
);
7241 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7242 /* Stop translation as we may have switched the execution mode */
7243 ctx
->bstate
= BS_STOP
;
7247 goto cp0_unimplemented
;
7253 save_cpu_state(ctx
, 1);
7254 /* Mark as an IO operation because we may trigger a software
7256 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7259 gen_helper_mtc0_cause(cpu_env
, arg
);
7260 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7263 /* Stop translation as we may have triggered an intetrupt */
7264 ctx
->bstate
= BS_STOP
;
7268 goto cp0_unimplemented
;
7274 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7278 goto cp0_unimplemented
;
7288 check_insn(ctx
, ISA_MIPS32R2
);
7289 gen_helper_mtc0_ebase(cpu_env
, arg
);
7293 goto cp0_unimplemented
;
7299 gen_helper_mtc0_config0(cpu_env
, arg
);
7301 /* Stop translation as we may have switched the execution mode */
7302 ctx
->bstate
= BS_STOP
;
7305 /* ignored, read only */
7309 gen_helper_mtc0_config2(cpu_env
, arg
);
7311 /* Stop translation as we may have switched the execution mode */
7312 ctx
->bstate
= BS_STOP
;
7315 gen_helper_mtc0_config3(cpu_env
, arg
);
7317 /* Stop translation as we may have switched the execution mode */
7318 ctx
->bstate
= BS_STOP
;
7321 /* currently ignored */
7325 gen_helper_mtc0_config5(cpu_env
, arg
);
7327 /* Stop translation as we may have switched the execution mode */
7328 ctx
->bstate
= BS_STOP
;
7330 /* 6,7 are implementation dependent */
7332 rn
= "Invalid config selector";
7333 goto cp0_unimplemented
;
7339 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7343 CP0_CHECK(ctx
->mrp
);
7344 gen_helper_mtc0_maar(cpu_env
, arg
);
7348 CP0_CHECK(ctx
->mrp
);
7349 gen_helper_mtc0_maari(cpu_env
, arg
);
7353 goto cp0_unimplemented
;
7359 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7363 goto cp0_unimplemented
;
7369 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7373 goto cp0_unimplemented
;
7379 check_insn(ctx
, ISA_MIPS3
);
7380 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7384 goto cp0_unimplemented
;
7388 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7389 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7392 gen_helper_mtc0_framemask(cpu_env
, arg
);
7396 goto cp0_unimplemented
;
7401 rn
= "Diagnostic"; /* implementation dependent */
7406 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7407 /* BS_STOP isn't good enough here, hflags may have changed. */
7408 gen_save_pc(ctx
->pc
+ 4);
7409 ctx
->bstate
= BS_EXCP
;
7413 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7414 /* Stop translation as we may have switched the execution mode */
7415 ctx
->bstate
= BS_STOP
;
7416 rn
= "TraceControl";
7417 goto cp0_unimplemented
;
7419 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7420 /* Stop translation as we may have switched the execution mode */
7421 ctx
->bstate
= BS_STOP
;
7422 rn
= "TraceControl2";
7423 goto cp0_unimplemented
;
7425 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7426 /* Stop translation as we may have switched the execution mode */
7427 ctx
->bstate
= BS_STOP
;
7428 rn
= "UserTraceData";
7429 goto cp0_unimplemented
;
7431 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7432 /* Stop translation as we may have switched the execution mode */
7433 ctx
->bstate
= BS_STOP
;
7435 goto cp0_unimplemented
;
7437 goto cp0_unimplemented
;
7444 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7448 goto cp0_unimplemented
;
7454 gen_helper_mtc0_performance0(cpu_env
, arg
);
7455 rn
= "Performance0";
7458 // gen_helper_mtc0_performance1(cpu_env, arg);
7459 rn
= "Performance1";
7460 goto cp0_unimplemented
;
7462 // gen_helper_mtc0_performance2(cpu_env, arg);
7463 rn
= "Performance2";
7464 goto cp0_unimplemented
;
7466 // gen_helper_mtc0_performance3(cpu_env, arg);
7467 rn
= "Performance3";
7468 goto cp0_unimplemented
;
7470 // gen_helper_mtc0_performance4(cpu_env, arg);
7471 rn
= "Performance4";
7472 goto cp0_unimplemented
;
7474 // gen_helper_mtc0_performance5(cpu_env, arg);
7475 rn
= "Performance5";
7476 goto cp0_unimplemented
;
7478 // gen_helper_mtc0_performance6(cpu_env, arg);
7479 rn
= "Performance6";
7480 goto cp0_unimplemented
;
7482 // gen_helper_mtc0_performance7(cpu_env, arg);
7483 rn
= "Performance7";
7484 goto cp0_unimplemented
;
7486 goto cp0_unimplemented
;
7492 gen_helper_mtc0_errctl(cpu_env
, arg
);
7493 ctx
->bstate
= BS_STOP
;
7497 goto cp0_unimplemented
;
7507 goto cp0_unimplemented
;
7516 gen_helper_mtc0_taglo(cpu_env
, arg
);
7523 gen_helper_mtc0_datalo(cpu_env
, arg
);
7527 goto cp0_unimplemented
;
7536 gen_helper_mtc0_taghi(cpu_env
, arg
);
7543 gen_helper_mtc0_datahi(cpu_env
, arg
);
7548 goto cp0_unimplemented
;
7554 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7558 goto cp0_unimplemented
;
7565 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7569 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7570 tcg_gen_st_tl(arg
, cpu_env
,
7571 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7575 goto cp0_unimplemented
;
7577 /* Stop translation as we may have switched the execution mode */
7578 ctx
->bstate
= BS_STOP
;
7581 goto cp0_unimplemented
;
    trace_mips_translate_c0("dmtc0", rn, reg, sel);

    /* For simplicity assume that all writes can cause interrupts. */
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
        ctx->bstate = BS_STOP;
    qemu_log_mask(LOG_UNIMP, "dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);

#endif /* TARGET_MIPS64 */
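
/* MT ASE: MFTR/MTTR move values between the current thread context and the
   TC selected by VPEControl.TargTC, covering CP0, GPR, DSP accumulator and
   FPU registers. */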
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
7615 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7618 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7628 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7631 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7634 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7637 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7640 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7643 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7646 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7649 gen_mfc0(ctx
, t0
, rt
, sel
);
7656 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7659 gen_mfc0(ctx
, t0
, rt
, sel
);
7665 gen_helper_mftc0_status(t0
, cpu_env
);
7668 gen_mfc0(ctx
, t0
, rt
, sel
);
7674 gen_helper_mftc0_cause(t0
, cpu_env
);
7684 gen_helper_mftc0_epc(t0
, cpu_env
);
7694 gen_helper_mftc0_ebase(t0
, cpu_env
);
7704 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7714 gen_helper_mftc0_debug(t0
, cpu_env
);
7717 gen_mfc0(ctx
, t0
, rt
, sel
);
7722 gen_mfc0(ctx
, t0
, rt
, sel
);
7724 } else switch (sel
) {
7725 /* GPR registers. */
7727 gen_helper_1e0i(mftgpr
, t0
, rt
);
7729 /* Auxiliary CPU registers */
7733 gen_helper_1e0i(mftlo
, t0
, 0);
7736 gen_helper_1e0i(mfthi
, t0
, 0);
7739 gen_helper_1e0i(mftacx
, t0
, 0);
7742 gen_helper_1e0i(mftlo
, t0
, 1);
7745 gen_helper_1e0i(mfthi
, t0
, 1);
7748 gen_helper_1e0i(mftacx
, t0
, 1);
7751 gen_helper_1e0i(mftlo
, t0
, 2);
7754 gen_helper_1e0i(mfthi
, t0
, 2);
7757 gen_helper_1e0i(mftacx
, t0
, 2);
7760 gen_helper_1e0i(mftlo
, t0
, 3);
7763 gen_helper_1e0i(mfthi
, t0
, 3);
7766 gen_helper_1e0i(mftacx
, t0
, 3);
7769 gen_helper_mftdsp(t0
, cpu_env
);
7775 /* Floating point (COP1). */
7777 /* XXX: For now we support only a single FPU context. */
7779 TCGv_i32 fp0
= tcg_temp_new_i32();
7781 gen_load_fpr32(ctx
, fp0
, rt
);
7782 tcg_gen_ext_i32_tl(t0
, fp0
);
7783 tcg_temp_free_i32(fp0
);
7785 TCGv_i32 fp0
= tcg_temp_new_i32();
7787 gen_load_fpr32h(ctx
, fp0
, rt
);
7788 tcg_gen_ext_i32_tl(t0
, fp0
);
7789 tcg_temp_free_i32(fp0
);
7793 /* XXX: For now we support only a single FPU context. */
7794 gen_helper_1e0i(cfc1
, t0
, rt
);
7796 /* COP2: Not implemented. */
7803 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
7804 gen_store_gpr(t0
, rd
);
7810 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7811 generate_exception_end(ctx
, EXCP_RI
);
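/*
 * MT ASE: write a register belonging to the thread context (TC) selected by
 * CP0 VPEControl.TargTC on behalf of the MTTR instruction.  Writes to a TC
 * that is not accessible from the current VPE are ignored.
 */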
static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
            gen_helper_mttc0_vpecontrol(cpu_env, t0);
            gen_helper_mttc0_vpeconf0(cpu_env, t0);
            gen_helper_mttc0_tcstatus(cpu_env, t0);
            gen_helper_mttc0_tcbind(cpu_env, t0);
            gen_helper_mttc0_tcrestart(cpu_env, t0);
            gen_helper_mttc0_tchalt(cpu_env, t0);
            gen_helper_mttc0_tccontext(cpu_env, t0);
            gen_helper_mttc0_tcschedule(cpu_env, t0);
            gen_helper_mttc0_tcschefback(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_entryhi(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_status(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_helper_mttc0_cause(cpu_env, t0);
            gen_helper_mttc0_ebase(cpu_env, t0);
            gen_helper_mttc0_debug(cpu_env, t0);
            gen_mtc0(ctx, t0, rd, sel);
            gen_mtc0(ctx, t0, rd, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_0e1i(mttgpr, t0, rd);
    /* Auxiliary CPU registers */
            gen_helper_0e1i(mttlo, t0, 0);
            gen_helper_0e1i(mtthi, t0, 0);
            gen_helper_0e1i(mttacx, t0, 0);
            gen_helper_0e1i(mttlo, t0, 1);
            gen_helper_0e1i(mtthi, t0, 1);
            gen_helper_0e1i(mttacx, t0, 1);
            gen_helper_0e1i(mttlo, t0, 2);
            gen_helper_0e1i(mtthi, t0, 2);
            gen_helper_0e1i(mttacx, t0, 2);
            gen_helper_0e1i(mttlo, t0, 3);
            gen_helper_0e1i(mtthi, t0, 3);
            gen_helper_0e1i(mttacx, t0, 3);
            gen_helper_mttdsp(cpu_env, t0);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, rd);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fs_tmp = tcg_const_i32(rd);

            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
    /* COP2: Not implemented. */
    trace_mips_translate_tr("mttr", rd, u, sel, h);
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    generate_exception_end(ctx, EXCP_RI);
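/*
 * Dispatch a coprocessor 0 instruction: MFC0/MTC0 (plus the 64-bit, XPA and
 * MT variants), the TLB maintenance group, ERET/ERETNC, DERET and WAIT.
 * Privilege is checked up front via check_cp0_enabled().
 */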
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
    const char *opn = "ldst";

    check_cp0_enabled(ctx);
        gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
            TCGv t0 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
        check_insn(ctx, ISA_MIPS3);
        gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
        check_insn(ctx, ISA_MIPS3);
            TCGv t0 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
        gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
            TCGv t0 = tcg_temp_new();
            gen_load_gpr(t0, rt);
            gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
        check_insn(ctx, ASE_MT);
        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        check_insn(ctx, ASE_MT);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        if (!env->tlb->helper_tlbwi)
        gen_helper_tlbwi(cpu_env);
        if (!env->tlb->helper_tlbinv) {
            gen_helper_tlbinv(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbinvf) {
            gen_helper_tlbinvf(cpu_env);
        } /* treat as nop if TLBINV not supported */
        if (!env->tlb->helper_tlbwr)
        gen_helper_tlbwr(cpu_env);
        if (!env->tlb->helper_tlbp)
        gen_helper_tlbp(cpu_env);
        if (!env->tlb->helper_tlbr)
        gen_helper_tlbr(cpu_env);
    case OPC_ERET: /* OPC_ERETNC */
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
            if (ctx->opcode & (1 << bit_shift)) {
                check_insn(ctx, ISA_MIPS32R5);
                gen_helper_eretnc(cpu_env);
                check_insn(ctx, ISA_MIPS2);
                gen_helper_eret(cpu_env);
            ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
            if (!(ctx->hflags & MIPS_HFLAG_DM)) {
                generate_exception_end(ctx, EXCP_RI);
                gen_helper_deret(cpu_env);
                ctx->bstate = BS_EXCP;
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if ((ctx->insn_flags & ISA_MIPS32R6) &&
            (ctx->hflags & MIPS_HFLAG_BMASK)) {
        /* If we get an exception, we want to restart at next instruction */
        save_cpu_state(ctx, 1);
        gen_helper_wait(cpu_env);
        ctx->bstate = BS_EXCP;
        generate_exception_end(ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
#endif /* !CONFIG_USER_ONLY */
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();

    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);

    btarget = ctx->pc + 4 + offset;
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BL;
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_and_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_nand_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
        tcg_gen_or_i32(t0, t0, t1);
        tcg_temp_free_i32(t1);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;
    tcg_temp_free_i32(t0);
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
        generate_exception_end(ctx, EXCP_RI);

    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);

    btarget = addr_add(ctx, ctx->pc + 4, offset);
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);

    tcg_gen_trunc_i64_tl(bcond, t0);

    ctx->btarget = btarget;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;

    tcg_temp_free_i64(t0);
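/*
 * The fopcode values below pack the operand format field (instruction bits
 * 25:21) together with the 6-bit function field via FOP(func, fmt), e.g.
 * FOP(0, FMT_S) is ADD.S while FOP(0, FMT_D) is ADD.D.
 */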
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))

    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_SEL_S = FOP(16, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_SELEQZ_S = FOP(20, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_SELNEZ_S = FOP(23, FMT_S),
    OPC_MADDF_S = FOP(24, FMT_S),
    OPC_MSUBF_S = FOP(25, FMT_S),
    OPC_RINT_S = FOP(26, FMT_S),
    OPC_CLASS_S = FOP(27, FMT_S),
    OPC_MIN_S = FOP(28, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_MINA_S = FOP(29, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_MAX_S = FOP(30, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_MAXA_S = FOP(31, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP(48, FMT_S),
    OPC_CMP_UN_S = FOP(49, FMT_S),
    OPC_CMP_EQ_S = FOP(50, FMT_S),
    OPC_CMP_UEQ_S = FOP(51, FMT_S),
    OPC_CMP_OLT_S = FOP(52, FMT_S),
    OPC_CMP_ULT_S = FOP(53, FMT_S),
    OPC_CMP_OLE_S = FOP(54, FMT_S),
    OPC_CMP_ULE_S = FOP(55, FMT_S),
    OPC_CMP_SF_S = FOP(56, FMT_S),
    OPC_CMP_NGLE_S = FOP(57, FMT_S),
    OPC_CMP_SEQ_S = FOP(58, FMT_S),
    OPC_CMP_NGL_S = FOP(59, FMT_S),
    OPC_CMP_LT_S = FOP(60, FMT_S),
    OPC_CMP_NGE_S = FOP(61, FMT_S),
    OPC_CMP_LE_S = FOP(62, FMT_S),
    OPC_CMP_NGT_S = FOP(63, FMT_S),

    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_SEL_D = FOP(16, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_SELEQZ_D = FOP(20, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_SELNEZ_D = FOP(23, FMT_D),
    OPC_MADDF_D = FOP(24, FMT_D),
    OPC_MSUBF_D = FOP(25, FMT_D),
    OPC_RINT_D = FOP(26, FMT_D),
    OPC_CLASS_D = FOP(27, FMT_D),
    OPC_MIN_D = FOP(28, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_MINA_D = FOP(29, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_MAX_D = FOP(30, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_MAXA_D = FOP(31, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP(48, FMT_D),
    OPC_CMP_UN_D = FOP(49, FMT_D),
    OPC_CMP_EQ_D = FOP(50, FMT_D),
    OPC_CMP_UEQ_D = FOP(51, FMT_D),
    OPC_CMP_OLT_D = FOP(52, FMT_D),
    OPC_CMP_ULT_D = FOP(53, FMT_D),
    OPC_CMP_OLE_D = FOP(54, FMT_D),
    OPC_CMP_ULE_D = FOP(55, FMT_D),
    OPC_CMP_SF_D = FOP(56, FMT_D),
    OPC_CMP_NGLE_D = FOP(57, FMT_D),
    OPC_CMP_SEQ_D = FOP(58, FMT_D),
    OPC_CMP_NGL_D = FOP(59, FMT_D),
    OPC_CMP_LT_D = FOP(60, FMT_D),
    OPC_CMP_NGE_D = FOP(61, FMT_D),
    OPC_CMP_LE_D = FOP(62, FMT_D),
    OPC_CMP_NGT_D = FOP(63, FMT_D),

    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),

    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP(48, FMT_PS),
    OPC_CMP_UN_PS = FOP(49, FMT_PS),
    OPC_CMP_EQ_PS = FOP(50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP(51, FMT_PS),
    OPC_CMP_OLT_PS = FOP(52, FMT_PS),
    OPC_CMP_ULT_PS = FOP(53, FMT_PS),
    OPC_CMP_OLE_PS = FOP(54, FMT_PS),
    OPC_CMP_ULE_PS = FOP(55, FMT_PS),
    OPC_CMP_SF_PS = FOP(56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP(57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP(58, FMT_PS),
    OPC_CMP_NGL_PS = FOP(59, FMT_PS),
    OPC_CMP_LT_PS = FOP(60, FMT_PS),
    OPC_CMP_NGE_PS = FOP(61, FMT_PS),
    OPC_CMP_LE_PS = FOP(62, FMT_PS),
    OPC_CMP_NGT_PS = FOP(63, FMT_PS),

    R6_OPC_CMP_AF_S = FOP(0, FMT_W),
    R6_OPC_CMP_UN_S = FOP(1, FMT_W),
    R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
    R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
    R6_OPC_CMP_LT_S = FOP(4, FMT_W),
    R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
    R6_OPC_CMP_LE_S = FOP(6, FMT_W),
    R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
    R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
    R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
    R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
    R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
    R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
    R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
    R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
    R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
    R6_OPC_CMP_OR_S = FOP(17, FMT_W),
    R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
    R6_OPC_CMP_NE_S = FOP(19, FMT_W),
    R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
    R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
    R6_OPC_CMP_SNE_S = FOP(27, FMT_W),

    R6_OPC_CMP_AF_D = FOP(0, FMT_L),
    R6_OPC_CMP_UN_D = FOP(1, FMT_L),
    R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
    R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
    R6_OPC_CMP_LT_D = FOP(4, FMT_L),
    R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
    R6_OPC_CMP_LE_D = FOP(6, FMT_L),
    R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
    R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
    R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
    R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
    R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
    R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
    R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
    R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
    R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
    R6_OPC_CMP_OR_D = FOP(17, FMT_L),
    R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
    R6_OPC_CMP_NE_D = FOP(19, FMT_L),
    R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
    R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
    R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
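/*
 * CP1 move instructions: MFC1/MTC1, CFC1/CTC1, DMFC1/DMTC1 and MFHC1/MTHC1
 * transfer data between a GPR and an FPU data or control register; CTC1 may
 * change hflags, so translation is stopped afterwards.
 */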
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    TCGv t0 = tcg_temp_new();
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);
            TCGv_i32 fs_tmp = tcg_const_i32(fs);

            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
#if defined(TARGET_MIPS64)
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32h(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
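/*
 * Conditional moves between FPU registers (MOVF.fmt / MOVT.fmt), keyed on an
 * FP condition code bit in FCR31.
 */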
static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
                               int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_temp_free_i32(t0);
static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
                                int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);
        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);
static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);
        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
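/*
 * Main dispatcher for the CP1 arithmetic operations: the fopcode value
 * (format in bits 25:21, function in bits 5:0) selects the S/D/W/L/PS
 * variant, and most cases load the FPU source registers into TCG
 * temporaries, call the matching float helper and store the result.
 */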
static void gen_farith (DisasContext *ctx, enum fopcode op1,
                        int ft, int fs, int fd, int cc)
    uint32_t func = ctx->opcode & 0x3f;
8834 TCGv_i32 fp0
= tcg_temp_new_i32();
8835 TCGv_i32 fp1
= tcg_temp_new_i32();
8837 gen_load_fpr32(ctx
, fp0
, fs
);
8838 gen_load_fpr32(ctx
, fp1
, ft
);
8839 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8840 tcg_temp_free_i32(fp1
);
8841 gen_store_fpr32(ctx
, fp0
, fd
);
8842 tcg_temp_free_i32(fp0
);
8847 TCGv_i32 fp0
= tcg_temp_new_i32();
8848 TCGv_i32 fp1
= tcg_temp_new_i32();
8850 gen_load_fpr32(ctx
, fp0
, fs
);
8851 gen_load_fpr32(ctx
, fp1
, ft
);
8852 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8853 tcg_temp_free_i32(fp1
);
8854 gen_store_fpr32(ctx
, fp0
, fd
);
8855 tcg_temp_free_i32(fp0
);
8860 TCGv_i32 fp0
= tcg_temp_new_i32();
8861 TCGv_i32 fp1
= tcg_temp_new_i32();
8863 gen_load_fpr32(ctx
, fp0
, fs
);
8864 gen_load_fpr32(ctx
, fp1
, ft
);
8865 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8866 tcg_temp_free_i32(fp1
);
8867 gen_store_fpr32(ctx
, fp0
, fd
);
8868 tcg_temp_free_i32(fp0
);
8873 TCGv_i32 fp0
= tcg_temp_new_i32();
8874 TCGv_i32 fp1
= tcg_temp_new_i32();
8876 gen_load_fpr32(ctx
, fp0
, fs
);
8877 gen_load_fpr32(ctx
, fp1
, ft
);
8878 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8879 tcg_temp_free_i32(fp1
);
8880 gen_store_fpr32(ctx
, fp0
, fd
);
8881 tcg_temp_free_i32(fp0
);
8886 TCGv_i32 fp0
= tcg_temp_new_i32();
8888 gen_load_fpr32(ctx
, fp0
, fs
);
8889 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8890 gen_store_fpr32(ctx
, fp0
, fd
);
8891 tcg_temp_free_i32(fp0
);
8896 TCGv_i32 fp0
= tcg_temp_new_i32();
8898 gen_load_fpr32(ctx
, fp0
, fs
);
8900 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
8902 gen_helper_float_abs_s(fp0
, fp0
);
8904 gen_store_fpr32(ctx
, fp0
, fd
);
8905 tcg_temp_free_i32(fp0
);
8910 TCGv_i32 fp0
= tcg_temp_new_i32();
8912 gen_load_fpr32(ctx
, fp0
, fs
);
8913 gen_store_fpr32(ctx
, fp0
, fd
);
8914 tcg_temp_free_i32(fp0
);
8919 TCGv_i32 fp0
= tcg_temp_new_i32();
8921 gen_load_fpr32(ctx
, fp0
, fs
);
8923 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
8925 gen_helper_float_chs_s(fp0
, fp0
);
8927 gen_store_fpr32(ctx
, fp0
, fd
);
8928 tcg_temp_free_i32(fp0
);
8932 check_cp1_64bitmode(ctx
);
8934 TCGv_i32 fp32
= tcg_temp_new_i32();
8935 TCGv_i64 fp64
= tcg_temp_new_i64();
8937 gen_load_fpr32(ctx
, fp32
, fs
);
8939 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
8941 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
8943 tcg_temp_free_i32(fp32
);
8944 gen_store_fpr64(ctx
, fp64
, fd
);
8945 tcg_temp_free_i64(fp64
);
8949 check_cp1_64bitmode(ctx
);
8951 TCGv_i32 fp32
= tcg_temp_new_i32();
8952 TCGv_i64 fp64
= tcg_temp_new_i64();
8954 gen_load_fpr32(ctx
, fp32
, fs
);
8956 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
8958 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
8960 tcg_temp_free_i32(fp32
);
8961 gen_store_fpr64(ctx
, fp64
, fd
);
8962 tcg_temp_free_i64(fp64
);
8966 check_cp1_64bitmode(ctx
);
8968 TCGv_i32 fp32
= tcg_temp_new_i32();
8969 TCGv_i64 fp64
= tcg_temp_new_i64();
8971 gen_load_fpr32(ctx
, fp32
, fs
);
8973 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
8975 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
8977 tcg_temp_free_i32(fp32
);
8978 gen_store_fpr64(ctx
, fp64
, fd
);
8979 tcg_temp_free_i64(fp64
);
8983 check_cp1_64bitmode(ctx
);
8985 TCGv_i32 fp32
= tcg_temp_new_i32();
8986 TCGv_i64 fp64
= tcg_temp_new_i64();
8988 gen_load_fpr32(ctx
, fp32
, fs
);
8990 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
8992 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
8994 tcg_temp_free_i32(fp32
);
8995 gen_store_fpr64(ctx
, fp64
, fd
);
8996 tcg_temp_free_i64(fp64
);
9001 TCGv_i32 fp0
= tcg_temp_new_i32();
9003 gen_load_fpr32(ctx
, fp0
, fs
);
9005 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
9007 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
9009 gen_store_fpr32(ctx
, fp0
, fd
);
9010 tcg_temp_free_i32(fp0
);
9015 TCGv_i32 fp0
= tcg_temp_new_i32();
9017 gen_load_fpr32(ctx
, fp0
, fs
);
9019 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
9021 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
9023 gen_store_fpr32(ctx
, fp0
, fd
);
9024 tcg_temp_free_i32(fp0
);
9029 TCGv_i32 fp0
= tcg_temp_new_i32();
9031 gen_load_fpr32(ctx
, fp0
, fs
);
9033 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
9035 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
9037 gen_store_fpr32(ctx
, fp0
, fd
);
9038 tcg_temp_free_i32(fp0
);
9043 TCGv_i32 fp0
= tcg_temp_new_i32();
9045 gen_load_fpr32(ctx
, fp0
, fs
);
9047 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
9049 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
9051 gen_store_fpr32(ctx
, fp0
, fd
);
9052 tcg_temp_free_i32(fp0
);
9056 check_insn(ctx
, ISA_MIPS32R6
);
9057 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9060 check_insn(ctx
, ISA_MIPS32R6
);
9061 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9064 check_insn(ctx
, ISA_MIPS32R6
);
9065 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9068 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9069 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9072 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9074 TCGLabel
*l1
= gen_new_label();
9078 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9080 fp0
= tcg_temp_new_i32();
9081 gen_load_fpr32(ctx
, fp0
, fs
);
9082 gen_store_fpr32(ctx
, fp0
, fd
);
9083 tcg_temp_free_i32(fp0
);
9088 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9090 TCGLabel
*l1
= gen_new_label();
9094 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9095 fp0
= tcg_temp_new_i32();
9096 gen_load_fpr32(ctx
, fp0
, fs
);
9097 gen_store_fpr32(ctx
, fp0
, fd
);
9098 tcg_temp_free_i32(fp0
);
9105 TCGv_i32 fp0
= tcg_temp_new_i32();
9107 gen_load_fpr32(ctx
, fp0
, fs
);
9108 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9109 gen_store_fpr32(ctx
, fp0
, fd
);
9110 tcg_temp_free_i32(fp0
);
9115 TCGv_i32 fp0
= tcg_temp_new_i32();
9117 gen_load_fpr32(ctx
, fp0
, fs
);
9118 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9119 gen_store_fpr32(ctx
, fp0
, fd
);
9120 tcg_temp_free_i32(fp0
);
9124 check_insn(ctx
, ISA_MIPS32R6
);
9126 TCGv_i32 fp0
= tcg_temp_new_i32();
9127 TCGv_i32 fp1
= tcg_temp_new_i32();
9128 TCGv_i32 fp2
= tcg_temp_new_i32();
9129 gen_load_fpr32(ctx
, fp0
, fs
);
9130 gen_load_fpr32(ctx
, fp1
, ft
);
9131 gen_load_fpr32(ctx
, fp2
, fd
);
9132 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9133 gen_store_fpr32(ctx
, fp2
, fd
);
9134 tcg_temp_free_i32(fp2
);
9135 tcg_temp_free_i32(fp1
);
9136 tcg_temp_free_i32(fp0
);
9140 check_insn(ctx
, ISA_MIPS32R6
);
9142 TCGv_i32 fp0
= tcg_temp_new_i32();
9143 TCGv_i32 fp1
= tcg_temp_new_i32();
9144 TCGv_i32 fp2
= tcg_temp_new_i32();
9145 gen_load_fpr32(ctx
, fp0
, fs
);
9146 gen_load_fpr32(ctx
, fp1
, ft
);
9147 gen_load_fpr32(ctx
, fp2
, fd
);
9148 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9149 gen_store_fpr32(ctx
, fp2
, fd
);
9150 tcg_temp_free_i32(fp2
);
9151 tcg_temp_free_i32(fp1
);
9152 tcg_temp_free_i32(fp0
);
9156 check_insn(ctx
, ISA_MIPS32R6
);
9158 TCGv_i32 fp0
= tcg_temp_new_i32();
9159 gen_load_fpr32(ctx
, fp0
, fs
);
9160 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9161 gen_store_fpr32(ctx
, fp0
, fd
);
9162 tcg_temp_free_i32(fp0
);
9166 check_insn(ctx
, ISA_MIPS32R6
);
9168 TCGv_i32 fp0
= tcg_temp_new_i32();
9169 gen_load_fpr32(ctx
, fp0
, fs
);
9170 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
9171 gen_store_fpr32(ctx
, fp0
, fd
);
9172 tcg_temp_free_i32(fp0
);
9175 case OPC_MIN_S
: /* OPC_RECIP2_S */
9176 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9178 TCGv_i32 fp0
= tcg_temp_new_i32();
9179 TCGv_i32 fp1
= tcg_temp_new_i32();
9180 TCGv_i32 fp2
= tcg_temp_new_i32();
9181 gen_load_fpr32(ctx
, fp0
, fs
);
9182 gen_load_fpr32(ctx
, fp1
, ft
);
9183 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9184 gen_store_fpr32(ctx
, fp2
, fd
);
9185 tcg_temp_free_i32(fp2
);
9186 tcg_temp_free_i32(fp1
);
9187 tcg_temp_free_i32(fp0
);
9190 check_cp1_64bitmode(ctx
);
9192 TCGv_i32 fp0
= tcg_temp_new_i32();
9193 TCGv_i32 fp1
= tcg_temp_new_i32();
9195 gen_load_fpr32(ctx
, fp0
, fs
);
9196 gen_load_fpr32(ctx
, fp1
, ft
);
9197 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9198 tcg_temp_free_i32(fp1
);
9199 gen_store_fpr32(ctx
, fp0
, fd
);
9200 tcg_temp_free_i32(fp0
);
9204 case OPC_MINA_S
: /* OPC_RECIP1_S */
9205 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9207 TCGv_i32 fp0
= tcg_temp_new_i32();
9208 TCGv_i32 fp1
= tcg_temp_new_i32();
9209 TCGv_i32 fp2
= tcg_temp_new_i32();
9210 gen_load_fpr32(ctx
, fp0
, fs
);
9211 gen_load_fpr32(ctx
, fp1
, ft
);
9212 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9213 gen_store_fpr32(ctx
, fp2
, fd
);
9214 tcg_temp_free_i32(fp2
);
9215 tcg_temp_free_i32(fp1
);
9216 tcg_temp_free_i32(fp0
);
9219 check_cp1_64bitmode(ctx
);
9221 TCGv_i32 fp0
= tcg_temp_new_i32();
9223 gen_load_fpr32(ctx
, fp0
, fs
);
9224 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9225 gen_store_fpr32(ctx
, fp0
, fd
);
9226 tcg_temp_free_i32(fp0
);
9230 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9231 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9233 TCGv_i32 fp0
= tcg_temp_new_i32();
9234 TCGv_i32 fp1
= tcg_temp_new_i32();
9235 gen_load_fpr32(ctx
, fp0
, fs
);
9236 gen_load_fpr32(ctx
, fp1
, ft
);
9237 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9238 gen_store_fpr32(ctx
, fp1
, fd
);
9239 tcg_temp_free_i32(fp1
);
9240 tcg_temp_free_i32(fp0
);
9243 check_cp1_64bitmode(ctx
);
9245 TCGv_i32 fp0
= tcg_temp_new_i32();
9247 gen_load_fpr32(ctx
, fp0
, fs
);
9248 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9249 gen_store_fpr32(ctx
, fp0
, fd
);
9250 tcg_temp_free_i32(fp0
);
9254 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9255 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9257 TCGv_i32 fp0
= tcg_temp_new_i32();
9258 TCGv_i32 fp1
= tcg_temp_new_i32();
9259 gen_load_fpr32(ctx
, fp0
, fs
);
9260 gen_load_fpr32(ctx
, fp1
, ft
);
9261 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9262 gen_store_fpr32(ctx
, fp1
, fd
);
9263 tcg_temp_free_i32(fp1
);
9264 tcg_temp_free_i32(fp0
);
9267 check_cp1_64bitmode(ctx
);
9269 TCGv_i32 fp0
= tcg_temp_new_i32();
9270 TCGv_i32 fp1
= tcg_temp_new_i32();
9272 gen_load_fpr32(ctx
, fp0
, fs
);
9273 gen_load_fpr32(ctx
, fp1
, ft
);
9274 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9275 tcg_temp_free_i32(fp1
);
9276 gen_store_fpr32(ctx
, fp0
, fd
);
9277 tcg_temp_free_i32(fp0
);
9282 check_cp1_registers(ctx
, fd
);
9284 TCGv_i32 fp32
= tcg_temp_new_i32();
9285 TCGv_i64 fp64
= tcg_temp_new_i64();
9287 gen_load_fpr32(ctx
, fp32
, fs
);
9288 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9289 tcg_temp_free_i32(fp32
);
9290 gen_store_fpr64(ctx
, fp64
, fd
);
9291 tcg_temp_free_i64(fp64
);
9296 TCGv_i32 fp0
= tcg_temp_new_i32();
9298 gen_load_fpr32(ctx
, fp0
, fs
);
9300 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
9302 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
9304 gen_store_fpr32(ctx
, fp0
, fd
);
9305 tcg_temp_free_i32(fp0
);
9309 check_cp1_64bitmode(ctx
);
9311 TCGv_i32 fp32
= tcg_temp_new_i32();
9312 TCGv_i64 fp64
= tcg_temp_new_i64();
9314 gen_load_fpr32(ctx
, fp32
, fs
);
9316 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
9318 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
9320 tcg_temp_free_i32(fp32
);
9321 gen_store_fpr64(ctx
, fp64
, fd
);
9322 tcg_temp_free_i64(fp64
);
9328 TCGv_i64 fp64
= tcg_temp_new_i64();
9329 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9330 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9332 gen_load_fpr32(ctx
, fp32_0
, fs
);
9333 gen_load_fpr32(ctx
, fp32_1
, ft
);
9334 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9335 tcg_temp_free_i32(fp32_1
);
9336 tcg_temp_free_i32(fp32_0
);
9337 gen_store_fpr64(ctx
, fp64
, fd
);
9338 tcg_temp_free_i64(fp64
);
9350 case OPC_CMP_NGLE_S
:
9357 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9358 if (ctx
->opcode
& (1 << 6)) {
9359 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9361 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9365 check_cp1_registers(ctx
, fs
| ft
| fd
);
9367 TCGv_i64 fp0
= tcg_temp_new_i64();
9368 TCGv_i64 fp1
= tcg_temp_new_i64();
9370 gen_load_fpr64(ctx
, fp0
, fs
);
9371 gen_load_fpr64(ctx
, fp1
, ft
);
9372 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9373 tcg_temp_free_i64(fp1
);
9374 gen_store_fpr64(ctx
, fp0
, fd
);
9375 tcg_temp_free_i64(fp0
);
9379 check_cp1_registers(ctx
, fs
| ft
| fd
);
9381 TCGv_i64 fp0
= tcg_temp_new_i64();
9382 TCGv_i64 fp1
= tcg_temp_new_i64();
9384 gen_load_fpr64(ctx
, fp0
, fs
);
9385 gen_load_fpr64(ctx
, fp1
, ft
);
9386 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9387 tcg_temp_free_i64(fp1
);
9388 gen_store_fpr64(ctx
, fp0
, fd
);
9389 tcg_temp_free_i64(fp0
);
9393 check_cp1_registers(ctx
, fs
| ft
| fd
);
9395 TCGv_i64 fp0
= tcg_temp_new_i64();
9396 TCGv_i64 fp1
= tcg_temp_new_i64();
9398 gen_load_fpr64(ctx
, fp0
, fs
);
9399 gen_load_fpr64(ctx
, fp1
, ft
);
9400 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9401 tcg_temp_free_i64(fp1
);
9402 gen_store_fpr64(ctx
, fp0
, fd
);
9403 tcg_temp_free_i64(fp0
);
9407 check_cp1_registers(ctx
, fs
| ft
| fd
);
9409 TCGv_i64 fp0
= tcg_temp_new_i64();
9410 TCGv_i64 fp1
= tcg_temp_new_i64();
9412 gen_load_fpr64(ctx
, fp0
, fs
);
9413 gen_load_fpr64(ctx
, fp1
, ft
);
9414 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9415 tcg_temp_free_i64(fp1
);
9416 gen_store_fpr64(ctx
, fp0
, fd
);
9417 tcg_temp_free_i64(fp0
);
9421 check_cp1_registers(ctx
, fs
| fd
);
9423 TCGv_i64 fp0
= tcg_temp_new_i64();
9425 gen_load_fpr64(ctx
, fp0
, fs
);
9426 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9427 gen_store_fpr64(ctx
, fp0
, fd
);
9428 tcg_temp_free_i64(fp0
);
9432 check_cp1_registers(ctx
, fs
| fd
);
9434 TCGv_i64 fp0
= tcg_temp_new_i64();
9436 gen_load_fpr64(ctx
, fp0
, fs
);
9438 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
9440 gen_helper_float_abs_d(fp0
, fp0
);
9442 gen_store_fpr64(ctx
, fp0
, fd
);
9443 tcg_temp_free_i64(fp0
);
9447 check_cp1_registers(ctx
, fs
| fd
);
9449 TCGv_i64 fp0
= tcg_temp_new_i64();
9451 gen_load_fpr64(ctx
, fp0
, fs
);
9452 gen_store_fpr64(ctx
, fp0
, fd
);
9453 tcg_temp_free_i64(fp0
);
9457 check_cp1_registers(ctx
, fs
| fd
);
9459 TCGv_i64 fp0
= tcg_temp_new_i64();
9461 gen_load_fpr64(ctx
, fp0
, fs
);
9463 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
9465 gen_helper_float_chs_d(fp0
, fp0
);
9467 gen_store_fpr64(ctx
, fp0
, fd
);
9468 tcg_temp_free_i64(fp0
);
9472 check_cp1_64bitmode(ctx
);
9474 TCGv_i64 fp0
= tcg_temp_new_i64();
9476 gen_load_fpr64(ctx
, fp0
, fs
);
9478 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
9480 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
9482 gen_store_fpr64(ctx
, fp0
, fd
);
9483 tcg_temp_free_i64(fp0
);
9487 check_cp1_64bitmode(ctx
);
9489 TCGv_i64 fp0
= tcg_temp_new_i64();
9491 gen_load_fpr64(ctx
, fp0
, fs
);
9493 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
9495 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
9497 gen_store_fpr64(ctx
, fp0
, fd
);
9498 tcg_temp_free_i64(fp0
);
9502 check_cp1_64bitmode(ctx
);
9504 TCGv_i64 fp0
= tcg_temp_new_i64();
9506 gen_load_fpr64(ctx
, fp0
, fs
);
9508 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
9510 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
9512 gen_store_fpr64(ctx
, fp0
, fd
);
9513 tcg_temp_free_i64(fp0
);
9517 check_cp1_64bitmode(ctx
);
9519 TCGv_i64 fp0
= tcg_temp_new_i64();
9521 gen_load_fpr64(ctx
, fp0
, fs
);
9523 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
9525 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
9527 gen_store_fpr64(ctx
, fp0
, fd
);
9528 tcg_temp_free_i64(fp0
);
9532 check_cp1_registers(ctx
, fs
);
9534 TCGv_i32 fp32
= tcg_temp_new_i32();
9535 TCGv_i64 fp64
= tcg_temp_new_i64();
9537 gen_load_fpr64(ctx
, fp64
, fs
);
9539 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
9541 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
9543 tcg_temp_free_i64(fp64
);
9544 gen_store_fpr32(ctx
, fp32
, fd
);
9545 tcg_temp_free_i32(fp32
);
9549 check_cp1_registers(ctx
, fs
);
9551 TCGv_i32 fp32
= tcg_temp_new_i32();
9552 TCGv_i64 fp64
= tcg_temp_new_i64();
9554 gen_load_fpr64(ctx
, fp64
, fs
);
9556 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
9558 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
9560 tcg_temp_free_i64(fp64
);
9561 gen_store_fpr32(ctx
, fp32
, fd
);
9562 tcg_temp_free_i32(fp32
);
9566 check_cp1_registers(ctx
, fs
);
9568 TCGv_i32 fp32
= tcg_temp_new_i32();
9569 TCGv_i64 fp64
= tcg_temp_new_i64();
9571 gen_load_fpr64(ctx
, fp64
, fs
);
9573 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
9575 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
9577 tcg_temp_free_i64(fp64
);
9578 gen_store_fpr32(ctx
, fp32
, fd
);
9579 tcg_temp_free_i32(fp32
);
9583 check_cp1_registers(ctx
, fs
);
9585 TCGv_i32 fp32
= tcg_temp_new_i32();
9586 TCGv_i64 fp64
= tcg_temp_new_i64();
9588 gen_load_fpr64(ctx
, fp64
, fs
);
9590 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
9592 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
9594 tcg_temp_free_i64(fp64
);
9595 gen_store_fpr32(ctx
, fp32
, fd
);
9596 tcg_temp_free_i32(fp32
);
9600 check_insn(ctx
, ISA_MIPS32R6
);
9601 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9604 check_insn(ctx
, ISA_MIPS32R6
);
9605 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9608 check_insn(ctx
, ISA_MIPS32R6
);
9609 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9612 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9613 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9616 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9618 TCGLabel
*l1
= gen_new_label();
9622 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9624 fp0
= tcg_temp_new_i64();
9625 gen_load_fpr64(ctx
, fp0
, fs
);
9626 gen_store_fpr64(ctx
, fp0
, fd
);
9627 tcg_temp_free_i64(fp0
);
9632 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9634 TCGLabel
*l1
= gen_new_label();
9638 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9639 fp0
= tcg_temp_new_i64();
9640 gen_load_fpr64(ctx
, fp0
, fs
);
9641 gen_store_fpr64(ctx
, fp0
, fd
);
9642 tcg_temp_free_i64(fp0
);
9648 check_cp1_registers(ctx
, fs
| fd
);
9650 TCGv_i64 fp0
= tcg_temp_new_i64();
9652 gen_load_fpr64(ctx
, fp0
, fs
);
9653 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9654 gen_store_fpr64(ctx
, fp0
, fd
);
9655 tcg_temp_free_i64(fp0
);
9659 check_cp1_registers(ctx
, fs
| fd
);
9661 TCGv_i64 fp0
= tcg_temp_new_i64();
9663 gen_load_fpr64(ctx
, fp0
, fs
);
9664 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9665 gen_store_fpr64(ctx
, fp0
, fd
);
9666 tcg_temp_free_i64(fp0
);
9670 check_insn(ctx
, ISA_MIPS32R6
);
9672 TCGv_i64 fp0
= tcg_temp_new_i64();
9673 TCGv_i64 fp1
= tcg_temp_new_i64();
9674 TCGv_i64 fp2
= tcg_temp_new_i64();
9675 gen_load_fpr64(ctx
, fp0
, fs
);
9676 gen_load_fpr64(ctx
, fp1
, ft
);
9677 gen_load_fpr64(ctx
, fp2
, fd
);
9678 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9679 gen_store_fpr64(ctx
, fp2
, fd
);
9680 tcg_temp_free_i64(fp2
);
9681 tcg_temp_free_i64(fp1
);
9682 tcg_temp_free_i64(fp0
);
9686 check_insn(ctx
, ISA_MIPS32R6
);
9688 TCGv_i64 fp0
= tcg_temp_new_i64();
9689 TCGv_i64 fp1
= tcg_temp_new_i64();
9690 TCGv_i64 fp2
= tcg_temp_new_i64();
9691 gen_load_fpr64(ctx
, fp0
, fs
);
9692 gen_load_fpr64(ctx
, fp1
, ft
);
9693 gen_load_fpr64(ctx
, fp2
, fd
);
9694 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9695 gen_store_fpr64(ctx
, fp2
, fd
);
9696 tcg_temp_free_i64(fp2
);
9697 tcg_temp_free_i64(fp1
);
9698 tcg_temp_free_i64(fp0
);
9702 check_insn(ctx
, ISA_MIPS32R6
);
9704 TCGv_i64 fp0
= tcg_temp_new_i64();
9705 gen_load_fpr64(ctx
, fp0
, fs
);
9706 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9707 gen_store_fpr64(ctx
, fp0
, fd
);
9708 tcg_temp_free_i64(fp0
);
9712 check_insn(ctx
, ISA_MIPS32R6
);
9714 TCGv_i64 fp0
= tcg_temp_new_i64();
9715 gen_load_fpr64(ctx
, fp0
, fs
);
9716 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
9717 gen_store_fpr64(ctx
, fp0
, fd
);
9718 tcg_temp_free_i64(fp0
);
9721 case OPC_MIN_D
: /* OPC_RECIP2_D */
9722 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9724 TCGv_i64 fp0
= tcg_temp_new_i64();
9725 TCGv_i64 fp1
= tcg_temp_new_i64();
9726 gen_load_fpr64(ctx
, fp0
, fs
);
9727 gen_load_fpr64(ctx
, fp1
, ft
);
9728 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9729 gen_store_fpr64(ctx
, fp1
, fd
);
9730 tcg_temp_free_i64(fp1
);
9731 tcg_temp_free_i64(fp0
);
9734 check_cp1_64bitmode(ctx
);
9736 TCGv_i64 fp0
= tcg_temp_new_i64();
9737 TCGv_i64 fp1
= tcg_temp_new_i64();
9739 gen_load_fpr64(ctx
, fp0
, fs
);
9740 gen_load_fpr64(ctx
, fp1
, ft
);
9741 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9742 tcg_temp_free_i64(fp1
);
9743 gen_store_fpr64(ctx
, fp0
, fd
);
9744 tcg_temp_free_i64(fp0
);
9748 case OPC_MINA_D
: /* OPC_RECIP1_D */
9749 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9751 TCGv_i64 fp0
= tcg_temp_new_i64();
9752 TCGv_i64 fp1
= tcg_temp_new_i64();
9753 gen_load_fpr64(ctx
, fp0
, fs
);
9754 gen_load_fpr64(ctx
, fp1
, ft
);
9755 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9756 gen_store_fpr64(ctx
, fp1
, fd
);
9757 tcg_temp_free_i64(fp1
);
9758 tcg_temp_free_i64(fp0
);
9761 check_cp1_64bitmode(ctx
);
9763 TCGv_i64 fp0
= tcg_temp_new_i64();
9765 gen_load_fpr64(ctx
, fp0
, fs
);
9766 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9767 gen_store_fpr64(ctx
, fp0
, fd
);
9768 tcg_temp_free_i64(fp0
);
9772 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9773 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9775 TCGv_i64 fp0
= tcg_temp_new_i64();
9776 TCGv_i64 fp1
= tcg_temp_new_i64();
9777 gen_load_fpr64(ctx
, fp0
, fs
);
9778 gen_load_fpr64(ctx
, fp1
, ft
);
9779 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9780 gen_store_fpr64(ctx
, fp1
, fd
);
9781 tcg_temp_free_i64(fp1
);
9782 tcg_temp_free_i64(fp0
);
9785 check_cp1_64bitmode(ctx
);
9787 TCGv_i64 fp0
= tcg_temp_new_i64();
9789 gen_load_fpr64(ctx
, fp0
, fs
);
9790 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9791 gen_store_fpr64(ctx
, fp0
, fd
);
9792 tcg_temp_free_i64(fp0
);
9796 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9797 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9799 TCGv_i64 fp0
= tcg_temp_new_i64();
9800 TCGv_i64 fp1
= tcg_temp_new_i64();
9801 gen_load_fpr64(ctx
, fp0
, fs
);
9802 gen_load_fpr64(ctx
, fp1
, ft
);
9803 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9804 gen_store_fpr64(ctx
, fp1
, fd
);
9805 tcg_temp_free_i64(fp1
);
9806 tcg_temp_free_i64(fp0
);
9809 check_cp1_64bitmode(ctx
);
9811 TCGv_i64 fp0
= tcg_temp_new_i64();
9812 TCGv_i64 fp1
= tcg_temp_new_i64();
9814 gen_load_fpr64(ctx
, fp0
, fs
);
9815 gen_load_fpr64(ctx
, fp1
, ft
);
9816 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9817 tcg_temp_free_i64(fp1
);
9818 gen_store_fpr64(ctx
, fp0
, fd
);
9819 tcg_temp_free_i64(fp0
);
9832 case OPC_CMP_NGLE_D
:
9839 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9840 if (ctx
->opcode
& (1 << 6)) {
9841 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9843 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9847 check_cp1_registers(ctx
, fs
);
9849 TCGv_i32 fp32
= tcg_temp_new_i32();
9850 TCGv_i64 fp64
= tcg_temp_new_i64();
9852 gen_load_fpr64(ctx
, fp64
, fs
);
9853 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9854 tcg_temp_free_i64(fp64
);
9855 gen_store_fpr32(ctx
, fp32
, fd
);
9856 tcg_temp_free_i32(fp32
);
9860 check_cp1_registers(ctx
, fs
);
9862 TCGv_i32 fp32
= tcg_temp_new_i32();
9863 TCGv_i64 fp64
= tcg_temp_new_i64();
9865 gen_load_fpr64(ctx
, fp64
, fs
);
9867 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
9869 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
9871 tcg_temp_free_i64(fp64
);
9872 gen_store_fpr32(ctx
, fp32
, fd
);
9873 tcg_temp_free_i32(fp32
);
9877 check_cp1_64bitmode(ctx
);
9879 TCGv_i64 fp0
= tcg_temp_new_i64();
9881 gen_load_fpr64(ctx
, fp0
, fs
);
9883 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
9885 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
9887 gen_store_fpr64(ctx
, fp0
, fd
);
9888 tcg_temp_free_i64(fp0
);
9893 TCGv_i32 fp0
= tcg_temp_new_i32();
9895 gen_load_fpr32(ctx
, fp0
, fs
);
9896 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9897 gen_store_fpr32(ctx
, fp0
, fd
);
9898 tcg_temp_free_i32(fp0
);
9902 check_cp1_registers(ctx
, fd
);
9904 TCGv_i32 fp32
= tcg_temp_new_i32();
9905 TCGv_i64 fp64
= tcg_temp_new_i64();
9907 gen_load_fpr32(ctx
, fp32
, fs
);
9908 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9909 tcg_temp_free_i32(fp32
);
9910 gen_store_fpr64(ctx
, fp64
, fd
);
9911 tcg_temp_free_i64(fp64
);
9915 check_cp1_64bitmode(ctx
);
9917 TCGv_i32 fp32
= tcg_temp_new_i32();
9918 TCGv_i64 fp64
= tcg_temp_new_i64();
9920 gen_load_fpr64(ctx
, fp64
, fs
);
9921 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9922 tcg_temp_free_i64(fp64
);
9923 gen_store_fpr32(ctx
, fp32
, fd
);
9924 tcg_temp_free_i32(fp32
);
9928 check_cp1_64bitmode(ctx
);
9930 TCGv_i64 fp0
= tcg_temp_new_i64();
9932 gen_load_fpr64(ctx
, fp0
, fs
);
9933 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9934 gen_store_fpr64(ctx
, fp0
, fd
);
9935 tcg_temp_free_i64(fp0
);
9941 TCGv_i64 fp0
= tcg_temp_new_i64();
9943 gen_load_fpr64(ctx
, fp0
, fs
);
9944 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9945 gen_store_fpr64(ctx
, fp0
, fd
);
9946 tcg_temp_free_i64(fp0
);
9952 TCGv_i64 fp0
= tcg_temp_new_i64();
9953 TCGv_i64 fp1
= tcg_temp_new_i64();
9955 gen_load_fpr64(ctx
, fp0
, fs
);
9956 gen_load_fpr64(ctx
, fp1
, ft
);
9957 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9958 tcg_temp_free_i64(fp1
);
9959 gen_store_fpr64(ctx
, fp0
, fd
);
9960 tcg_temp_free_i64(fp0
);
9966 TCGv_i64 fp0
= tcg_temp_new_i64();
9967 TCGv_i64 fp1
= tcg_temp_new_i64();
9969 gen_load_fpr64(ctx
, fp0
, fs
);
9970 gen_load_fpr64(ctx
, fp1
, ft
);
9971 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9972 tcg_temp_free_i64(fp1
);
9973 gen_store_fpr64(ctx
, fp0
, fd
);
9974 tcg_temp_free_i64(fp0
);
9980 TCGv_i64 fp0
= tcg_temp_new_i64();
9981 TCGv_i64 fp1
= tcg_temp_new_i64();
9983 gen_load_fpr64(ctx
, fp0
, fs
);
9984 gen_load_fpr64(ctx
, fp1
, ft
);
9985 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9986 tcg_temp_free_i64(fp1
);
9987 gen_store_fpr64(ctx
, fp0
, fd
);
9988 tcg_temp_free_i64(fp0
);
9994 TCGv_i64 fp0
= tcg_temp_new_i64();
9996 gen_load_fpr64(ctx
, fp0
, fs
);
9997 gen_helper_float_abs_ps(fp0
, fp0
);
9998 gen_store_fpr64(ctx
, fp0
, fd
);
9999 tcg_temp_free_i64(fp0
);
10005 TCGv_i64 fp0
= tcg_temp_new_i64();
10007 gen_load_fpr64(ctx
, fp0
, fs
);
10008 gen_store_fpr64(ctx
, fp0
, fd
);
10009 tcg_temp_free_i64(fp0
);
10015 TCGv_i64 fp0
= tcg_temp_new_i64();
10017 gen_load_fpr64(ctx
, fp0
, fs
);
10018 gen_helper_float_chs_ps(fp0
, fp0
);
10019 gen_store_fpr64(ctx
, fp0
, fd
);
10020 tcg_temp_free_i64(fp0
);
10025 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10030 TCGLabel
*l1
= gen_new_label();
10034 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10035 fp0
= tcg_temp_new_i64();
10036 gen_load_fpr64(ctx
, fp0
, fs
);
10037 gen_store_fpr64(ctx
, fp0
, fd
);
10038 tcg_temp_free_i64(fp0
);
10045 TCGLabel
*l1
= gen_new_label();
10049 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10050 fp0
= tcg_temp_new_i64();
10051 gen_load_fpr64(ctx
, fp0
, fs
);
10052 gen_store_fpr64(ctx
, fp0
, fd
);
10053 tcg_temp_free_i64(fp0
);
10061 TCGv_i64 fp0
= tcg_temp_new_i64();
10062 TCGv_i64 fp1
= tcg_temp_new_i64();
10064 gen_load_fpr64(ctx
, fp0
, ft
);
10065 gen_load_fpr64(ctx
, fp1
, fs
);
10066 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
10067 tcg_temp_free_i64(fp1
);
10068 gen_store_fpr64(ctx
, fp0
, fd
);
10069 tcg_temp_free_i64(fp0
);
10075 TCGv_i64 fp0
= tcg_temp_new_i64();
10076 TCGv_i64 fp1
= tcg_temp_new_i64();
10078 gen_load_fpr64(ctx
, fp0
, ft
);
10079 gen_load_fpr64(ctx
, fp1
, fs
);
10080 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
10081 tcg_temp_free_i64(fp1
);
10082 gen_store_fpr64(ctx
, fp0
, fd
);
10083 tcg_temp_free_i64(fp0
);
10086 case OPC_RECIP2_PS
:
10089 TCGv_i64 fp0
= tcg_temp_new_i64();
10090 TCGv_i64 fp1
= tcg_temp_new_i64();
10092 gen_load_fpr64(ctx
, fp0
, fs
);
10093 gen_load_fpr64(ctx
, fp1
, ft
);
10094 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
10095 tcg_temp_free_i64(fp1
);
10096 gen_store_fpr64(ctx
, fp0
, fd
);
10097 tcg_temp_free_i64(fp0
);
10100 case OPC_RECIP1_PS
:
10103 TCGv_i64 fp0
= tcg_temp_new_i64();
10105 gen_load_fpr64(ctx
, fp0
, fs
);
10106 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10107 gen_store_fpr64(ctx
, fp0
, fd
);
10108 tcg_temp_free_i64(fp0
);
10111 case OPC_RSQRT1_PS
:
10114 TCGv_i64 fp0
= tcg_temp_new_i64();
10116 gen_load_fpr64(ctx
, fp0
, fs
);
10117 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10118 gen_store_fpr64(ctx
, fp0
, fd
);
10119 tcg_temp_free_i64(fp0
);
10122 case OPC_RSQRT2_PS
:
10125 TCGv_i64 fp0
= tcg_temp_new_i64();
10126 TCGv_i64 fp1
= tcg_temp_new_i64();
10128 gen_load_fpr64(ctx
, fp0
, fs
);
10129 gen_load_fpr64(ctx
, fp1
, ft
);
10130 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10131 tcg_temp_free_i64(fp1
);
10132 gen_store_fpr64(ctx
, fp0
, fd
);
10133 tcg_temp_free_i64(fp0
);
10137 check_cp1_64bitmode(ctx
);
10139 TCGv_i32 fp0
= tcg_temp_new_i32();
10141 gen_load_fpr32h(ctx
, fp0
, fs
);
10142 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10143 gen_store_fpr32(ctx
, fp0
, fd
);
10144 tcg_temp_free_i32(fp0
);
10147 case OPC_CVT_PW_PS
:
10150 TCGv_i64 fp0
= tcg_temp_new_i64();
10152 gen_load_fpr64(ctx
, fp0
, fs
);
10153 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10154 gen_store_fpr64(ctx
, fp0
, fd
);
10155 tcg_temp_free_i64(fp0
);
10159 check_cp1_64bitmode(ctx
);
10161 TCGv_i32 fp0
= tcg_temp_new_i32();
10163 gen_load_fpr32(ctx
, fp0
, fs
);
10164 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10165 gen_store_fpr32(ctx
, fp0
, fd
);
10166 tcg_temp_free_i32(fp0
);
10172 TCGv_i32 fp0
= tcg_temp_new_i32();
10173 TCGv_i32 fp1
= tcg_temp_new_i32();
10175 gen_load_fpr32(ctx
, fp0
, fs
);
10176 gen_load_fpr32(ctx
, fp1
, ft
);
10177 gen_store_fpr32h(ctx
, fp0
, fd
);
10178 gen_store_fpr32(ctx
, fp1
, fd
);
10179 tcg_temp_free_i32(fp0
);
10180 tcg_temp_free_i32(fp1
);
10186 TCGv_i32 fp0
= tcg_temp_new_i32();
10187 TCGv_i32 fp1
= tcg_temp_new_i32();
10189 gen_load_fpr32(ctx
, fp0
, fs
);
10190 gen_load_fpr32h(ctx
, fp1
, ft
);
10191 gen_store_fpr32(ctx
, fp1
, fd
);
10192 gen_store_fpr32h(ctx
, fp0
, fd
);
10193 tcg_temp_free_i32(fp0
);
10194 tcg_temp_free_i32(fp1
);
10200 TCGv_i32 fp0
= tcg_temp_new_i32();
10201 TCGv_i32 fp1
= tcg_temp_new_i32();
10203 gen_load_fpr32h(ctx
, fp0
, fs
);
10204 gen_load_fpr32(ctx
, fp1
, ft
);
10205 gen_store_fpr32(ctx
, fp1
, fd
);
10206 gen_store_fpr32h(ctx
, fp0
, fd
);
10207 tcg_temp_free_i32(fp0
);
10208 tcg_temp_free_i32(fp1
);
10214 TCGv_i32 fp0
= tcg_temp_new_i32();
10215 TCGv_i32 fp1
= tcg_temp_new_i32();
10217 gen_load_fpr32h(ctx
, fp0
, fs
);
10218 gen_load_fpr32h(ctx
, fp1
, ft
);
10219 gen_store_fpr32(ctx
, fp1
, fd
);
10220 gen_store_fpr32h(ctx
, fp0
, fd
);
10221 tcg_temp_free_i32(fp0
);
10222 tcg_temp_free_i32(fp1
);
10226 case OPC_CMP_UN_PS
:
10227 case OPC_CMP_EQ_PS
:
10228 case OPC_CMP_UEQ_PS
:
10229 case OPC_CMP_OLT_PS
:
10230 case OPC_CMP_ULT_PS
:
10231 case OPC_CMP_OLE_PS
:
10232 case OPC_CMP_ULE_PS
:
10233 case OPC_CMP_SF_PS
:
10234 case OPC_CMP_NGLE_PS
:
10235 case OPC_CMP_SEQ_PS
:
10236 case OPC_CMP_NGL_PS
:
10237 case OPC_CMP_LT_PS
:
10238 case OPC_CMP_NGE_PS
:
10239 case OPC_CMP_LE_PS
:
10240 case OPC_CMP_NGT_PS
:
10241 if (ctx
->opcode
& (1 << 6)) {
10242 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10244 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10248 MIPS_INVAL("farith");
10249 generate_exception_end(ctx
, EXCP_RI
);
10254 /* Coprocessor 3 (FPU) */
10255 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10256 int fd
, int fs
, int base
, int index
)
10258 TCGv t0
= tcg_temp_new();
10261 gen_load_gpr(t0
, index
);
10262 } else if (index
== 0) {
10263 gen_load_gpr(t0
, base
);
10265 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
10267 /* Don't do NOP if destination is zero: we must perform the actual
10273 TCGv_i32 fp0
= tcg_temp_new_i32();
10275 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10276 tcg_gen_trunc_tl_i32(fp0
, t0
);
10277 gen_store_fpr32(ctx
, fp0
, fd
);
10278 tcg_temp_free_i32(fp0
);
10283 check_cp1_registers(ctx
, fd
);
10285 TCGv_i64 fp0
= tcg_temp_new_i64();
10286 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10287 gen_store_fpr64(ctx
, fp0
, fd
);
10288 tcg_temp_free_i64(fp0
);
10292 check_cp1_64bitmode(ctx
);
10293 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10295 TCGv_i64 fp0
= tcg_temp_new_i64();
10297 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10298 gen_store_fpr64(ctx
, fp0
, fd
);
10299 tcg_temp_free_i64(fp0
);
10305 TCGv_i32 fp0
= tcg_temp_new_i32();
10306 gen_load_fpr32(ctx
, fp0
, fs
);
10307 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10308 tcg_temp_free_i32(fp0
);
10313 check_cp1_registers(ctx
, fs
);
10315 TCGv_i64 fp0
= tcg_temp_new_i64();
10316 gen_load_fpr64(ctx
, fp0
, fs
);
10317 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10318 tcg_temp_free_i64(fp0
);
10322 check_cp1_64bitmode(ctx
);
10323 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10325 TCGv_i64 fp0
= tcg_temp_new_i64();
10326 gen_load_fpr64(ctx
, fp0
, fs
);
10327 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10328 tcg_temp_free_i64(fp0
);
10335 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10336 int fd
, int fr
, int fs
, int ft
)
10342 TCGv t0
= tcg_temp_local_new();
10343 TCGv_i32 fp
= tcg_temp_new_i32();
10344 TCGv_i32 fph
= tcg_temp_new_i32();
10345 TCGLabel
*l1
= gen_new_label();
10346 TCGLabel
*l2
= gen_new_label();
10348 gen_load_gpr(t0
, fr
);
10349 tcg_gen_andi_tl(t0
, t0
, 0x7);
10351 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10352 gen_load_fpr32(ctx
, fp
, fs
);
10353 gen_load_fpr32h(ctx
, fph
, fs
);
10354 gen_store_fpr32(ctx
, fp
, fd
);
10355 gen_store_fpr32h(ctx
, fph
, fd
);
10358 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10360 #ifdef TARGET_WORDS_BIGENDIAN
10361 gen_load_fpr32(ctx
, fp
, fs
);
10362 gen_load_fpr32h(ctx
, fph
, ft
);
10363 gen_store_fpr32h(ctx
, fp
, fd
);
10364 gen_store_fpr32(ctx
, fph
, fd
);
10366 gen_load_fpr32h(ctx
, fph
, fs
);
10367 gen_load_fpr32(ctx
, fp
, ft
);
10368 gen_store_fpr32(ctx
, fph
, fd
);
10369 gen_store_fpr32h(ctx
, fp
, fd
);
10372 tcg_temp_free_i32(fp
);
10373 tcg_temp_free_i32(fph
);
10379 TCGv_i32 fp0
= tcg_temp_new_i32();
10380 TCGv_i32 fp1
= tcg_temp_new_i32();
10381 TCGv_i32 fp2
= tcg_temp_new_i32();
10383 gen_load_fpr32(ctx
, fp0
, fs
);
10384 gen_load_fpr32(ctx
, fp1
, ft
);
10385 gen_load_fpr32(ctx
, fp2
, fr
);
10386 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10387 tcg_temp_free_i32(fp0
);
10388 tcg_temp_free_i32(fp1
);
10389 gen_store_fpr32(ctx
, fp2
, fd
);
10390 tcg_temp_free_i32(fp2
);
10395 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10397 TCGv_i64 fp0
= tcg_temp_new_i64();
10398 TCGv_i64 fp1
= tcg_temp_new_i64();
10399 TCGv_i64 fp2
= tcg_temp_new_i64();
10401 gen_load_fpr64(ctx
, fp0
, fs
);
10402 gen_load_fpr64(ctx
, fp1
, ft
);
10403 gen_load_fpr64(ctx
, fp2
, fr
);
10404 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10405 tcg_temp_free_i64(fp0
);
10406 tcg_temp_free_i64(fp1
);
10407 gen_store_fpr64(ctx
, fp2
, fd
);
10408 tcg_temp_free_i64(fp2
);
10414 TCGv_i64 fp0
= tcg_temp_new_i64();
10415 TCGv_i64 fp1
= tcg_temp_new_i64();
10416 TCGv_i64 fp2
= tcg_temp_new_i64();
10418 gen_load_fpr64(ctx
, fp0
, fs
);
10419 gen_load_fpr64(ctx
, fp1
, ft
);
10420 gen_load_fpr64(ctx
, fp2
, fr
);
10421 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10422 tcg_temp_free_i64(fp0
);
10423 tcg_temp_free_i64(fp1
);
10424 gen_store_fpr64(ctx
, fp2
, fd
);
10425 tcg_temp_free_i64(fp2
);
10431 TCGv_i32 fp0
= tcg_temp_new_i32();
10432 TCGv_i32 fp1
= tcg_temp_new_i32();
10433 TCGv_i32 fp2
= tcg_temp_new_i32();
10435 gen_load_fpr32(ctx
, fp0
, fs
);
10436 gen_load_fpr32(ctx
, fp1
, ft
);
10437 gen_load_fpr32(ctx
, fp2
, fr
);
10438 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10439 tcg_temp_free_i32(fp0
);
10440 tcg_temp_free_i32(fp1
);
10441 gen_store_fpr32(ctx
, fp2
, fd
);
10442 tcg_temp_free_i32(fp2
);
10447 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10449 TCGv_i64 fp0
= tcg_temp_new_i64();
10450 TCGv_i64 fp1
= tcg_temp_new_i64();
10451 TCGv_i64 fp2
= tcg_temp_new_i64();
10453 gen_load_fpr64(ctx
, fp0
, fs
);
10454 gen_load_fpr64(ctx
, fp1
, ft
);
10455 gen_load_fpr64(ctx
, fp2
, fr
);
10456 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10457 tcg_temp_free_i64(fp0
);
10458 tcg_temp_free_i64(fp1
);
10459 gen_store_fpr64(ctx
, fp2
, fd
);
10460 tcg_temp_free_i64(fp2
);
10466 TCGv_i64 fp0
= tcg_temp_new_i64();
10467 TCGv_i64 fp1
= tcg_temp_new_i64();
10468 TCGv_i64 fp2
= tcg_temp_new_i64();
10470 gen_load_fpr64(ctx
, fp0
, fs
);
10471 gen_load_fpr64(ctx
, fp1
, ft
);
10472 gen_load_fpr64(ctx
, fp2
, fr
);
10473 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10474 tcg_temp_free_i64(fp0
);
10475 tcg_temp_free_i64(fp1
);
10476 gen_store_fpr64(ctx
, fp2
, fd
);
10477 tcg_temp_free_i64(fp2
);
10483 TCGv_i32 fp0
= tcg_temp_new_i32();
10484 TCGv_i32 fp1
= tcg_temp_new_i32();
10485 TCGv_i32 fp2
= tcg_temp_new_i32();
10487 gen_load_fpr32(ctx
, fp0
, fs
);
10488 gen_load_fpr32(ctx
, fp1
, ft
);
10489 gen_load_fpr32(ctx
, fp2
, fr
);
10490 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10491 tcg_temp_free_i32(fp0
);
10492 tcg_temp_free_i32(fp1
);
10493 gen_store_fpr32(ctx
, fp2
, fd
);
10494 tcg_temp_free_i32(fp2
);
10499 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10501 TCGv_i64 fp0
= tcg_temp_new_i64();
10502 TCGv_i64 fp1
= tcg_temp_new_i64();
10503 TCGv_i64 fp2
= tcg_temp_new_i64();
10505 gen_load_fpr64(ctx
, fp0
, fs
);
10506 gen_load_fpr64(ctx
, fp1
, ft
);
10507 gen_load_fpr64(ctx
, fp2
, fr
);
10508 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10509 tcg_temp_free_i64(fp0
);
10510 tcg_temp_free_i64(fp1
);
10511 gen_store_fpr64(ctx
, fp2
, fd
);
10512 tcg_temp_free_i64(fp2
);
10518 TCGv_i64 fp0
= tcg_temp_new_i64();
10519 TCGv_i64 fp1
= tcg_temp_new_i64();
10520 TCGv_i64 fp2
= tcg_temp_new_i64();
10522 gen_load_fpr64(ctx
, fp0
, fs
);
10523 gen_load_fpr64(ctx
, fp1
, ft
);
10524 gen_load_fpr64(ctx
, fp2
, fr
);
10525 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10526 tcg_temp_free_i64(fp0
);
10527 tcg_temp_free_i64(fp1
);
10528 gen_store_fpr64(ctx
, fp2
, fd
);
10529 tcg_temp_free_i64(fp2
);
10535 TCGv_i32 fp0
= tcg_temp_new_i32();
10536 TCGv_i32 fp1
= tcg_temp_new_i32();
10537 TCGv_i32 fp2
= tcg_temp_new_i32();
10539 gen_load_fpr32(ctx
, fp0
, fs
);
10540 gen_load_fpr32(ctx
, fp1
, ft
);
10541 gen_load_fpr32(ctx
, fp2
, fr
);
10542 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10543 tcg_temp_free_i32(fp0
);
10544 tcg_temp_free_i32(fp1
);
10545 gen_store_fpr32(ctx
, fp2
, fd
);
10546 tcg_temp_free_i32(fp2
);
10551 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10553 TCGv_i64 fp0
= tcg_temp_new_i64();
10554 TCGv_i64 fp1
= tcg_temp_new_i64();
10555 TCGv_i64 fp2
= tcg_temp_new_i64();
10557 gen_load_fpr64(ctx
, fp0
, fs
);
10558 gen_load_fpr64(ctx
, fp1
, ft
);
10559 gen_load_fpr64(ctx
, fp2
, fr
);
10560 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10561 tcg_temp_free_i64(fp0
);
10562 tcg_temp_free_i64(fp1
);
10563 gen_store_fpr64(ctx
, fp2
, fd
);
10564 tcg_temp_free_i64(fp2
);
10570 TCGv_i64 fp0
= tcg_temp_new_i64();
10571 TCGv_i64 fp1
= tcg_temp_new_i64();
10572 TCGv_i64 fp2
= tcg_temp_new_i64();
10574 gen_load_fpr64(ctx
, fp0
, fs
);
10575 gen_load_fpr64(ctx
, fp1
, ft
);
10576 gen_load_fpr64(ctx
, fp2
, fr
);
10577 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10578 tcg_temp_free_i64(fp0
);
10579 tcg_temp_free_i64(fp1
);
10580 gen_store_fpr64(ctx
, fp2
, fd
);
10581 tcg_temp_free_i64(fp2
);
10585 MIPS_INVAL("flt3_arith");
10586 generate_exception_end(ctx
, EXCP_RI
);
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
{
    TCGv t0;

#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode.  */
    check_insn(ctx, ISA_MIPS32R2);
#endif
    t0 = tcg_temp_new();

    switch (rd) {
    case 0:
        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 1:
        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 2:
        gen_helper_rdhwr_cc(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 3:
        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 4:
        check_insn(ctx, ISA_MIPS32R6);
        if (sel != 0) {
            /* Performance counter registers are not implemented other than
             * control register 0.
             */
            generate_exception(ctx, EXCP_RI);
        }
        gen_helper_rdhwr_performance(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 5:
        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 29:
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
        break;
#else
        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free(t0);
}
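
/*
 * Illustrative note (comment only, not part of the build): RDHWR register 29
 * (ULR) is how guest user code reads its thread pointer.  Access is granted
 * only when CP0 is accessible or when the kernel has set HWREna.ULR, which is
 * what the MIPS_HFLAG_CP0 / MIPS_HFLAG_HWRENA_ULR test above models:
 *
 *     if (ctx->hflags & (MIPS_HFLAG_CP0 | MIPS_HFLAG_HWRENA_ULR)) {
 *         // load CP0_UserLocal and store it into rt
 *     } else {
 *         // raise Reserved Instruction; Linux then emulates RDHWR itself
 *     }
 */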
static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
    } else {
        /* it is not safe to save ctx->hflags as hflags may be changed
           in execution time by the instruction in delay / forbidden slot. */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}
static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io.  */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->pc + insn_bytes);
            break;
        case MIPS_HFLAG_B:
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* blikely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            {
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_temp_free(t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_lookup_and_goto_ptr(cpu_PC);
            break;
        default:
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
            abort();
        }
    }
}
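
/*
 * Illustrative sketch (comment only): the MIPS_HFLAG_BC case above emits the
 * classic two-exit pattern for a conditional delay-slot branch, roughly
 *
 *     TCGLabel *l1 = gen_new_label();
 *     tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);  // taken?
 *     gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);      // fall-through exit
 *     gen_set_label(l1);
 *     gen_goto_tb(ctx, 0, ctx->btarget);              // taken exit
 *
 * so both successors can be chained as separate translation-block exits.
 */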
10737 /* Compact Branches */
10738 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10739 int rs
, int rt
, int32_t offset
)
10741 int bcond_compute
= 0;
10742 TCGv t0
= tcg_temp_new();
10743 TCGv t1
= tcg_temp_new();
10744 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10746 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10747 #ifdef MIPS_DEBUG_DISAS
10748 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10751 generate_exception_end(ctx
, EXCP_RI
);
10755 /* Load needed operands and calculate btarget */
10757 /* compact branch */
10758 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10759 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10760 gen_load_gpr(t0
, rs
);
10761 gen_load_gpr(t1
, rt
);
10763 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10764 if (rs
<= rt
&& rs
== 0) {
10765 /* OPC_BEQZALC, OPC_BNEZALC */
10766 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10769 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10770 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10771 gen_load_gpr(t0
, rs
);
10772 gen_load_gpr(t1
, rt
);
10774 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10776 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10777 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10778 if (rs
== 0 || rs
== rt
) {
10779 /* OPC_BLEZALC, OPC_BGEZALC */
10780 /* OPC_BGTZALC, OPC_BLTZALC */
10781 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10783 gen_load_gpr(t0
, rs
);
10784 gen_load_gpr(t1
, rt
);
10786 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10790 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10795 /* OPC_BEQZC, OPC_BNEZC */
10796 gen_load_gpr(t0
, rs
);
10798 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10800 /* OPC_JIC, OPC_JIALC */
10801 TCGv tbase
= tcg_temp_new();
10802 TCGv toffset
= tcg_temp_new();
10804 gen_load_gpr(tbase
, rt
);
10805 tcg_gen_movi_tl(toffset
, offset
);
10806 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10807 tcg_temp_free(tbase
);
10808 tcg_temp_free(toffset
);
10812 MIPS_INVAL("Compact branch/jump");
10813 generate_exception_end(ctx
, EXCP_RI
);
10817 if (bcond_compute
== 0) {
10818 /* Uncoditional compact branch */
10821 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10824 ctx
->hflags
|= MIPS_HFLAG_BR
;
10827 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10830 ctx
->hflags
|= MIPS_HFLAG_B
;
10833 MIPS_INVAL("Compact branch/jump");
10834 generate_exception_end(ctx
, EXCP_RI
);
10838 /* Generating branch here as compact branches don't have delay slot */
10839 gen_branch(ctx
, 4);
10841 /* Conditional compact branch */
10842 TCGLabel
*fs
= gen_new_label();
10843 save_cpu_state(ctx
, 0);
10846 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10847 if (rs
== 0 && rt
!= 0) {
10849 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10850 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10852 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10855 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10858 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10859 if (rs
== 0 && rt
!= 0) {
10861 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10862 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10864 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10867 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10870 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10871 if (rs
== 0 && rt
!= 0) {
10873 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10874 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10876 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10879 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10882 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10883 if (rs
== 0 && rt
!= 0) {
10885 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10886 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10888 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10891 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
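
            /*
             * Note on the BOVC/BNVC sequence below (comment only): 32-bit
             * signed-add overflow is detected with the usual sign trick.
             * For sign-extended operands a and b with s = a + b,
             *
             *     int32_t a, b, s = a + b;
             *     bool ovf = ((s ^ a) & (s ^ b)) < 0;
             *
             * The TCG code computes the equivalent predicate
             * ((s ^ b) & ~(a ^ b)) < 0 and additionally ORs in
             * "input_overflow", which flags inputs that were not valid
             * sign-extended 32-bit values on a 64-bit target.
             */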
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (rs >= rt) {
            /* OPC_BOVC, OPC_BNVC */
            TCGv t2 = tcg_temp_new();
            TCGv t3 = tcg_temp_new();
            TCGv t4 = tcg_temp_new();
            TCGv input_overflow = tcg_temp_new();

            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
            tcg_gen_ext32s_tl(t2, t0);
            tcg_gen_setcond_tl(TCG_COND_NE, input_overflow, t2, t0);
            tcg_gen_ext32s_tl(t3, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, t4, t3, t1);
            tcg_gen_or_tl(input_overflow, input_overflow, t4);

            tcg_gen_add_tl(t4, t2, t3);
            tcg_gen_ext32s_tl(t4, t4);
            tcg_gen_xor_tl(t2, t2, t3);
            tcg_gen_xor_tl(t3, t4, t3);
            tcg_gen_andc_tl(t2, t3, t2);
            tcg_gen_setcondi_tl(TCG_COND_LT, t4, t2, 0);
            tcg_gen_or_tl(t4, t4, input_overflow);
            if (opc == OPC_BOVC) {
                /* OPC_BOVC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t4, 0, fs);
            } else {
                /* OPC_BNVC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t4, 0, fs);
            }
            tcg_temp_free(input_overflow);
            tcg_temp_free(t4);
            tcg_temp_free(t3);
            tcg_temp_free(t2);
10929 } else if (rs
< rt
&& rs
== 0) {
10930 /* OPC_BEQZALC, OPC_BNEZALC */
10931 if (opc
== OPC_BEQZALC
) {
10933 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
10936 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
10939 /* OPC_BEQC, OPC_BNEC */
10940 if (opc
== OPC_BEQC
) {
10942 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
10945 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
10950 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
10953 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
10956 MIPS_INVAL("Compact conditional branch/jump");
10957 generate_exception_end(ctx
, EXCP_RI
);
10961 /* Generating branch here as compact branches don't have delay slot */
10962 gen_goto_tb(ctx
, 1, ctx
->btarget
);
10965 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
10973 /* ISA extensions (ASEs) */
10974 /* MIPS16 extension to MIPS32 */
10976 /* MIPS16 major opcodes */
10978 M16_OPC_ADDIUSP
= 0x00,
10979 M16_OPC_ADDIUPC
= 0x01,
10981 M16_OPC_JAL
= 0x03,
10982 M16_OPC_BEQZ
= 0x04,
10983 M16_OPC_BNEQZ
= 0x05,
10984 M16_OPC_SHIFT
= 0x06,
10986 M16_OPC_RRIA
= 0x08,
10987 M16_OPC_ADDIU8
= 0x09,
10988 M16_OPC_SLTI
= 0x0a,
10989 M16_OPC_SLTIU
= 0x0b,
10992 M16_OPC_CMPI
= 0x0e,
10996 M16_OPC_LWSP
= 0x12,
10998 M16_OPC_LBU
= 0x14,
10999 M16_OPC_LHU
= 0x15,
11000 M16_OPC_LWPC
= 0x16,
11001 M16_OPC_LWU
= 0x17,
11004 M16_OPC_SWSP
= 0x1a,
11006 M16_OPC_RRR
= 0x1c,
11008 M16_OPC_EXTEND
= 0x1e,
11012 /* I8 funct field */
11031 /* RR funct field */
11065 /* I64 funct field */
11073 I64_DADDIUPC
= 0x6,
/* RR ry field for CNVT */
enum {
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,
};

static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
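
/*
 * Example (comment only): MIPS16 3-bit register fields do not name $0..$7
 * directly; xlat() maps them onto the ABI registers
 *   0 -> $16 (s0), 1 -> $17 (s1), 2 -> $2 (v0), 3 -> $3 (v1),
 *   4 -> $4 (a0), 5 -> $5 (a1), 6 -> $6 (a2), 7 -> $7 (a3)
 * so, e.g., xlat((ctx->opcode >> 8) & 0x7) turns the rx field into a real
 * GPR index.
 */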
11094 static void gen_mips16_save (DisasContext
*ctx
,
11095 int xsregs
, int aregs
,
11096 int do_ra
, int do_s0
, int do_s1
,
11099 TCGv t0
= tcg_temp_new();
11100 TCGv t1
= tcg_temp_new();
11101 TCGv t2
= tcg_temp_new();
11131 generate_exception_end(ctx
, EXCP_RI
);
11137 gen_base_offset_addr(ctx
, t0
, 29, 12);
11138 gen_load_gpr(t1
, 7);
11139 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11142 gen_base_offset_addr(ctx
, t0
, 29, 8);
11143 gen_load_gpr(t1
, 6);
11144 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11147 gen_base_offset_addr(ctx
, t0
, 29, 4);
11148 gen_load_gpr(t1
, 5);
11149 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11152 gen_base_offset_addr(ctx
, t0
, 29, 0);
11153 gen_load_gpr(t1
, 4);
11154 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11157 gen_load_gpr(t0
, 29);
#define DECR_AND_STORE(reg) do {                                 \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        gen_load_gpr(t1, reg);                                   \
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);       \
    } while (0)
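
/*
 * Example expansion (comment only): DECR_AND_STORE(31) emits TCG ops that
 * pre-decrement the running address in t0 by 4 and store $31 there:
 *
 *     tcg_gen_movi_tl(t2, -4);
 *     gen_op_addr_add(ctx, t0, t0, t2);
 *     gen_load_gpr(t1, 31);
 *     tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
 *
 * which is how the SAVE frame is pushed register by register below.
 */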
11167 DECR_AND_STORE(31);
11172 DECR_AND_STORE(30);
11175 DECR_AND_STORE(23);
11178 DECR_AND_STORE(22);
11181 DECR_AND_STORE(21);
11184 DECR_AND_STORE(20);
11187 DECR_AND_STORE(19);
11190 DECR_AND_STORE(18);
11194 DECR_AND_STORE(17);
11197 DECR_AND_STORE(16);
11227 generate_exception_end(ctx
, EXCP_RI
);
11243 #undef DECR_AND_STORE
11245 tcg_gen_movi_tl(t2
, -framesize
);
11246 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11252 static void gen_mips16_restore (DisasContext
*ctx
,
11253 int xsregs
, int aregs
,
11254 int do_ra
, int do_s0
, int do_s1
,
11258 TCGv t0
= tcg_temp_new();
11259 TCGv t1
= tcg_temp_new();
11260 TCGv t2
= tcg_temp_new();
11262 tcg_gen_movi_tl(t2
, framesize
);
11263 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
#define DECR_AND_LOAD(reg) do {                            \
        tcg_gen_movi_tl(t2, -4);                           \
        gen_op_addr_add(ctx, t0, t0, t2);                  \
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
        gen_store_gpr(t1, reg);                            \
    } while (0)
11333 generate_exception_end(ctx
, EXCP_RI
);
11349 #undef DECR_AND_LOAD
11351 tcg_gen_movi_tl(t2
, framesize
);
11352 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
{
    TCGv t0;

    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
    if (!is_64_bit) {
        tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
    }

    tcg_temp_free(t0);
}
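
/*
 * Worked example (comment only): a MIPS16 ADDIUPC with imm = 32 is generated
 * as roughly
 *
 *     rx = pc_relative_pc(ctx) + 32;
 *
 * followed by a 32-bit sign extension when is_64_bit == 0.  The exact base
 * value (and its low-bit masking) is whatever pc_relative_pc() defines for
 * PC-relative MIPS16 operations.
 */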
static void gen_cache_operation(DisasContext *ctx, uint32_t op, int base,
                                int16_t offset)
{
    TCGv_i32 t0 = tcg_const_i32(op);
    TCGv t1 = tcg_temp_new();
    gen_base_offset_addr(ctx, t1, base, offset);
    gen_helper_cache(cpu_env, t1, t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t0);
}
11388 #if defined(TARGET_MIPS64)
11389 static void decode_i64_mips16 (DisasContext
*ctx
,
11390 int ry
, int funct
, int16_t offset
,
11395 check_insn(ctx
, ISA_MIPS3
);
11396 check_mips_64(ctx
);
11397 offset
= extended
? offset
: offset
<< 3;
11398 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11401 check_insn(ctx
, ISA_MIPS3
);
11402 check_mips_64(ctx
);
11403 offset
= extended
? offset
: offset
<< 3;
11404 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11407 check_insn(ctx
, ISA_MIPS3
);
11408 check_mips_64(ctx
);
11409 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11410 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11413 check_insn(ctx
, ISA_MIPS3
);
11414 check_mips_64(ctx
);
11415 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11416 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11419 check_insn(ctx
, ISA_MIPS3
);
11420 check_mips_64(ctx
);
11421 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11422 generate_exception_end(ctx
, EXCP_RI
);
11424 offset
= extended
? offset
: offset
<< 3;
11425 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11429 check_insn(ctx
, ISA_MIPS3
);
11430 check_mips_64(ctx
);
11431 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11432 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11435 check_insn(ctx
, ISA_MIPS3
);
11436 check_mips_64(ctx
);
11437 offset
= extended
? offset
: offset
<< 2;
11438 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11441 check_insn(ctx
, ISA_MIPS3
);
11442 check_mips_64(ctx
);
11443 offset
= extended
? offset
: offset
<< 2;
11444 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11450 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11452 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11453 int op
, rx
, ry
, funct
, sa
;
11454 int16_t imm
, offset
;
11456 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11457 op
= (ctx
->opcode
>> 11) & 0x1f;
11458 sa
= (ctx
->opcode
>> 22) & 0x1f;
11459 funct
= (ctx
->opcode
>> 8) & 0x7;
11460 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11461 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11462 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11463 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11464 | (ctx
->opcode
& 0x1f));
11466 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11469 case M16_OPC_ADDIUSP
:
11470 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11472 case M16_OPC_ADDIUPC
:
11473 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11476 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11477 /* No delay slot, so just process as a normal instruction */
11480 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11481 /* No delay slot, so just process as a normal instruction */
11483 case M16_OPC_BNEQZ
:
11484 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11485 /* No delay slot, so just process as a normal instruction */
11487 case M16_OPC_SHIFT
:
11488 switch (ctx
->opcode
& 0x3) {
11490 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11493 #if defined(TARGET_MIPS64)
11494 check_mips_64(ctx
);
11495 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11497 generate_exception_end(ctx
, EXCP_RI
);
11501 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11504 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11508 #if defined(TARGET_MIPS64)
11510 check_insn(ctx
, ISA_MIPS3
);
11511 check_mips_64(ctx
);
11512 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11516 imm
= ctx
->opcode
& 0xf;
11517 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11518 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11519 imm
= (int16_t) (imm
<< 1) >> 1;
11520 if ((ctx
->opcode
>> 4) & 0x1) {
11521 #if defined(TARGET_MIPS64)
11522 check_mips_64(ctx
);
11523 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11525 generate_exception_end(ctx
, EXCP_RI
);
11528 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11531 case M16_OPC_ADDIU8
:
11532 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11535 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11537 case M16_OPC_SLTIU
:
11538 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11543 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11546 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11549 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11552 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11555 check_insn(ctx
, ISA_MIPS32
);
11557 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11558 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11559 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11560 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11561 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11562 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11563 | (ctx
->opcode
& 0xf)) << 3;
11565 if (ctx
->opcode
& (1 << 7)) {
11566 gen_mips16_save(ctx
, xsregs
, aregs
,
11567 do_ra
, do_s0
, do_s1
,
11570 gen_mips16_restore(ctx
, xsregs
, aregs
,
11571 do_ra
, do_s0
, do_s1
,
11577 generate_exception_end(ctx
, EXCP_RI
);
11582 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11585 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11587 #if defined(TARGET_MIPS64)
11589 check_insn(ctx
, ISA_MIPS3
);
11590 check_mips_64(ctx
);
11591 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11595 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11598 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11601 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11604 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11607 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11610 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11613 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11615 #if defined(TARGET_MIPS64)
11617 check_insn(ctx
, ISA_MIPS3
);
11618 check_mips_64(ctx
);
11619 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11623 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11626 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11629 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11632 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11634 #if defined(TARGET_MIPS64)
11636 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11640 generate_exception_end(ctx
, EXCP_RI
);
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
11656 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11660 int op
, cnvt_op
, op1
, offset
;
11664 op
= (ctx
->opcode
>> 11) & 0x1f;
11665 sa
= (ctx
->opcode
>> 2) & 0x7;
11666 sa
= sa
== 0 ? 8 : sa
;
11667 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11668 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11669 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11670 op1
= offset
= ctx
->opcode
& 0x1f;
11675 case M16_OPC_ADDIUSP
:
11677 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11679 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11682 case M16_OPC_ADDIUPC
:
11683 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11686 offset
= (ctx
->opcode
& 0x7ff) << 1;
11687 offset
= (int16_t)(offset
<< 4) >> 4;
11688 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11689 /* No delay slot, so just process as a normal instruction */
11692 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11693 offset
= (((ctx
->opcode
& 0x1f) << 21)
11694 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11696 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11697 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11701 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11702 ((int8_t)ctx
->opcode
) << 1, 0);
11703 /* No delay slot, so just process as a normal instruction */
11705 case M16_OPC_BNEQZ
:
11706 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11707 ((int8_t)ctx
->opcode
) << 1, 0);
11708 /* No delay slot, so just process as a normal instruction */
11710 case M16_OPC_SHIFT
:
11711 switch (ctx
->opcode
& 0x3) {
11713 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11716 #if defined(TARGET_MIPS64)
11717 check_insn(ctx
, ISA_MIPS3
);
11718 check_mips_64(ctx
);
11719 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11721 generate_exception_end(ctx
, EXCP_RI
);
11725 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11728 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11732 #if defined(TARGET_MIPS64)
11734 check_insn(ctx
, ISA_MIPS3
);
11735 check_mips_64(ctx
);
11736 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11741 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11743 if ((ctx
->opcode
>> 4) & 1) {
11744 #if defined(TARGET_MIPS64)
11745 check_insn(ctx
, ISA_MIPS3
);
11746 check_mips_64(ctx
);
11747 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11749 generate_exception_end(ctx
, EXCP_RI
);
11752 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11756 case M16_OPC_ADDIU8
:
11758 int16_t imm
= (int8_t) ctx
->opcode
;
11760 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11765 int16_t imm
= (uint8_t) ctx
->opcode
;
11766 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11769 case M16_OPC_SLTIU
:
11771 int16_t imm
= (uint8_t) ctx
->opcode
;
11772 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11779 funct
= (ctx
->opcode
>> 8) & 0x7;
11782 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11783 ((int8_t)ctx
->opcode
) << 1, 0);
11786 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11787 ((int8_t)ctx
->opcode
) << 1, 0);
11790 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11793 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11794 ((int8_t)ctx
->opcode
) << 3);
11797 check_insn(ctx
, ISA_MIPS32
);
11799 int do_ra
= ctx
->opcode
& (1 << 6);
11800 int do_s0
= ctx
->opcode
& (1 << 5);
11801 int do_s1
= ctx
->opcode
& (1 << 4);
11802 int framesize
= ctx
->opcode
& 0xf;
11804 if (framesize
== 0) {
11807 framesize
= framesize
<< 3;
11810 if (ctx
->opcode
& (1 << 7)) {
11811 gen_mips16_save(ctx
, 0, 0,
11812 do_ra
, do_s0
, do_s1
, framesize
);
11814 gen_mips16_restore(ctx
, 0, 0,
11815 do_ra
, do_s0
, do_s1
, framesize
);
11821 int rz
= xlat(ctx
->opcode
& 0x7);
11823 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11824 ((ctx
->opcode
>> 5) & 0x7);
11825 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11829 reg32
= ctx
->opcode
& 0x1f;
11830 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11833 generate_exception_end(ctx
, EXCP_RI
);
11840 int16_t imm
= (uint8_t) ctx
->opcode
;
11842 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11847 int16_t imm
= (uint8_t) ctx
->opcode
;
11848 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11851 #if defined(TARGET_MIPS64)
11853 check_insn(ctx
, ISA_MIPS3
);
11854 check_mips_64(ctx
);
11855 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11859 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11862 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11865 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11868 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11871 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11874 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11877 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11879 #if defined (TARGET_MIPS64)
11881 check_insn(ctx
, ISA_MIPS3
);
11882 check_mips_64(ctx
);
11883 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11887 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11890 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11893 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11896 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11900 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11903 switch (ctx
->opcode
& 0x3) {
11905 mips32_op
= OPC_ADDU
;
11908 mips32_op
= OPC_SUBU
;
11910 #if defined(TARGET_MIPS64)
11912 mips32_op
= OPC_DADDU
;
11913 check_insn(ctx
, ISA_MIPS3
);
11914 check_mips_64(ctx
);
11917 mips32_op
= OPC_DSUBU
;
11918 check_insn(ctx
, ISA_MIPS3
);
11919 check_mips_64(ctx
);
11923 generate_exception_end(ctx
, EXCP_RI
);
11927 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11936 int nd
= (ctx
->opcode
>> 7) & 0x1;
11937 int link
= (ctx
->opcode
>> 6) & 0x1;
11938 int ra
= (ctx
->opcode
>> 5) & 0x1;
11941 check_insn(ctx
, ISA_MIPS32
);
11950 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11955 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11956 gen_helper_do_semihosting(cpu_env
);
11958 /* XXX: not clear which exception should be raised
11959 * when in debug mode...
11961 check_insn(ctx
, ISA_MIPS32
);
11962 generate_exception_end(ctx
, EXCP_DBp
);
11966 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11969 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11972 generate_exception_end(ctx
, EXCP_BREAK
);
11975 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11978 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11981 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11983 #if defined (TARGET_MIPS64)
11985 check_insn(ctx
, ISA_MIPS3
);
11986 check_mips_64(ctx
);
11987 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11991 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11994 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11997 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
12000 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
12003 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
12006 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
12009 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
12012 check_insn(ctx
, ISA_MIPS32
);
12014 case RR_RY_CNVT_ZEB
:
12015 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12017 case RR_RY_CNVT_ZEH
:
12018 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12020 case RR_RY_CNVT_SEB
:
12021 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12023 case RR_RY_CNVT_SEH
:
12024 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12026 #if defined (TARGET_MIPS64)
12027 case RR_RY_CNVT_ZEW
:
12028 check_insn(ctx
, ISA_MIPS64
);
12029 check_mips_64(ctx
);
12030 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12032 case RR_RY_CNVT_SEW
:
12033 check_insn(ctx
, ISA_MIPS64
);
12034 check_mips_64(ctx
);
12035 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12039 generate_exception_end(ctx
, EXCP_RI
);
12044 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
12046 #if defined (TARGET_MIPS64)
12048 check_insn(ctx
, ISA_MIPS3
);
12049 check_mips_64(ctx
);
12050 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
12053 check_insn(ctx
, ISA_MIPS3
);
12054 check_mips_64(ctx
);
12055 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
12058 check_insn(ctx
, ISA_MIPS3
);
12059 check_mips_64(ctx
);
12060 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
12063 check_insn(ctx
, ISA_MIPS3
);
12064 check_mips_64(ctx
);
12065 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
12069 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
12072 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
12075 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
12078 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
12080 #if defined (TARGET_MIPS64)
12082 check_insn(ctx
, ISA_MIPS3
);
12083 check_mips_64(ctx
);
12084 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
12087 check_insn(ctx
, ISA_MIPS3
);
12088 check_mips_64(ctx
);
12089 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
12092 check_insn(ctx
, ISA_MIPS3
);
12093 check_mips_64(ctx
);
12094 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
12097 check_insn(ctx
, ISA_MIPS3
);
12098 check_mips_64(ctx
);
12099 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12103 generate_exception_end(ctx
, EXCP_RI
);
12107 case M16_OPC_EXTEND
:
12108 decode_extended_mips16_opc(env
, ctx
);
12111 #if defined(TARGET_MIPS64)
12113 funct
= (ctx
->opcode
>> 8) & 0x7;
12114 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12118 generate_exception_end(ctx
, EXCP_RI
);
12125 /* microMIPS extension to MIPS32/MIPS64 */
12128 * microMIPS32/microMIPS64 major opcodes
12130 * 1. MIPS Architecture for Programmers Volume II-B:
12131 * The microMIPS32 Instruction Set (Revision 3.05)
12133 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12135 * 2. MIPS Architecture For Programmers Volume II-A:
12136 * The MIPS64 Instruction Set (Revision 3.51)
12166 POOL32S
= 0x16, /* MIPS64 */
12167 DADDIU32
= 0x17, /* MIPS64 */
12196 /* 0x29 is reserved */
12209 /* 0x31 is reserved */
12222 SD32
= 0x36, /* MIPS64 */
12223 LD32
= 0x37, /* MIPS64 */
12225 /* 0x39 is reserved */
12241 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12251 /* POOL32A encoding of minor opcode field */
12254 /* These opcodes are distinguished only by bits 9..6; those bits are
12255 * what are recorded below. */
12292 /* The following can be distinguished by their lower 6 bits. */
12302 /* POOL32AXF encoding of minor opcode field extension */
12305 * 1. MIPS Architecture for Programmers Volume II-B:
12306 * The microMIPS32 Instruction Set (Revision 3.05)
12308 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12310 * 2. MIPS Architecture for Programmers VolumeIV-e:
12311 * The MIPS DSP Application-Specific Extension
12312 * to the microMIPS32 Architecture (Revision 2.34)
12314 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12329 /* begin of microMIPS32 DSP */
12331 /* bits 13..12 for 0x01 */
12337 /* bits 13..12 for 0x2a */
12343 /* bits 13..12 for 0x32 */
12347 /* end of microMIPS32 DSP */
12349 /* bits 15..12 for 0x2c */
12366 /* bits 15..12 for 0x34 */
12374 /* bits 15..12 for 0x3c */
12376 JR
= 0x0, /* alias */
12384 /* bits 15..12 for 0x05 */
12388 /* bits 15..12 for 0x0d */
12400 /* bits 15..12 for 0x15 */
12406 /* bits 15..12 for 0x1d */
12410 /* bits 15..12 for 0x2d */
12415 /* bits 15..12 for 0x35 */
12422 /* POOL32B encoding of minor opcode field (bits 15..12) */
12438 /* POOL32C encoding of minor opcode field (bits 15..12) */
12446 /* 0xa is reserved */
12453 /* 0x6 is reserved */
12459 /* POOL32F encoding of minor opcode field (bits 5..0) */
12462 /* These are the bit 7..6 values */
12471 /* These are the bit 8..6 values */
12496 MOVZ_FMT_05
= 0x05,
12530 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12537 /* POOL32Fxf encoding of minor opcode extension field */
12575 /* POOL32I encoding of minor opcode field (bits 25..21) */
12605 /* These overlap and are distinguished by bit16 of the instruction */
12614 /* POOL16A encoding of minor opcode field */
12621 /* POOL16B encoding of minor opcode field */
12628 /* POOL16C encoding of minor opcode field */
12648 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12668 /* POOL16D encoding of minor opcode field */
12675 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions.  */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width))) << (32-width))   \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
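
/*
 * Worked example (comment only): for op = 0x0000002a, SIMM(op, 1, 4) isolates
 * bits 4..1 (here 0x5), shifts the field to the top of a 32-bit word and
 * arithmetically back down, so the 4-bit field is read as signed:
 *   field 0x5 -> +5, field 0xd -> -3.
 * ZIMM() returns the same raw field zero-extended instead.
 */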
static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}

static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}

static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}

static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}

static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
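
/*
 * Worked example (comment only): ADDIUSP carries a 9-bit field that encodes a
 * signed word count with a folded range, as decoded in gen_addiusp() above:
 *   encoded 0..1     -> 256..257   (the largest positive adjustments)
 *   encoded 2..255   -> 2..255
 *   encoded 256..509 -> -256..-3
 *   encoded 510..511 -> -258..-257
 * The result is then scaled by 4, so e.g. encoded = 508 adjusts $sp by
 * (508 - 512) * 4 = -16 bytes.
 */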
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
{
    TCGv t0, t1;
    TCGv_i32 t2;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
    switch (opc) {
    case LWM32:
        gen_helper_lwm(cpu_env, t0, t1, t2);
        break;
    case SWM32:
        gen_helper_swm(cpu_env, t0, t1, t2);
        break;
#ifdef TARGET_MIPS64
    case LDM:
        gen_helper_ldm(cpu_env, t0, t1, t2);
        break;
    case SDM:
        gen_helper_sdm(cpu_env, t0, t1, t2);
        break;
#endif
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t2);
}
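
/*
 * Example usage (comment only): the reglist argument is passed straight to
 * the lwm/swm helpers.  For the 16-bit LWM16 form handled below, the
 * lwm_convert[] table maps the 2-bit list field onto the 0x11..0x14 encodings
 * of the 32-bit LWM32 form, and the 4-bit offset is zero-extended and scaled
 * by 4, roughly:
 *
 *     gen_ldst_multiple(ctx, LWM32, lwm_convert[(ctx->opcode >> 4) & 0x3],
 *                       29, ZIMM(ctx->opcode, 0, 4) << 2);
 */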
12806 static void gen_pool16c_insn(DisasContext
*ctx
)
12808 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12809 int rs
= mmreg(ctx
->opcode
& 0x7);
12811 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12816 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12822 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12828 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12834 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12841 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12842 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12844 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12853 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12854 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12856 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12863 int reg
= ctx
->opcode
& 0x1f;
12865 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12871 int reg
= ctx
->opcode
& 0x1f;
12872 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12873 /* Let normal delay slot handling in our caller take us
12874 to the branch target. */
12879 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12880 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12884 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12885 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12889 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12893 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12896 generate_exception_end(ctx
, EXCP_BREAK
);
12899 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12900 gen_helper_do_semihosting(cpu_env
);
12902 /* XXX: not clear which exception should be raised
12903 * when in debug mode...
12905 check_insn(ctx
, ISA_MIPS32
);
12906 generate_exception_end(ctx
, EXCP_DBp
);
12909 case JRADDIUSP
+ 0:
12910 case JRADDIUSP
+ 1:
12912 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12913 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12914 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12915 /* Let normal delay slot handling in our caller take us
12916 to the branch target. */
12920 generate_exception_end(ctx
, EXCP_RI
);
static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
                             int enc_rs)
{
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];
    if (rs) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
    if (rt) {
        tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[re], 0);
    }
}
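
/*
 * Example (comment only): MOVEP performs two moves in one 16-bit instruction.
 * enc_dest selects a destination pair through rd_enc[]/re_enc[] (e.g.
 * enc_dest = 0 writes $5 and $6), while enc_rs/enc_rt pick sources from
 * rs_rt_enc[], where entry 0 stands for the zero register and is materialised
 * with a movi of 0 rather than a read of cpu_gpr[0].
 */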
12948 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12950 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12951 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12953 switch (ctx
->opcode
& 0xf) {
12955 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12958 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12962 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12963 int offset
= extract32(ctx
->opcode
, 4, 4);
12964 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12967 case R6_JRC16
: /* JRCADDIUSP */
12968 if ((ctx
->opcode
>> 4) & 1) {
12970 int imm
= extract32(ctx
->opcode
, 5, 5);
12971 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12972 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12975 int rs
= extract32(ctx
->opcode
, 5, 5);
12976 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12979 case MOVEP
... MOVEP_07
:
12980 case MOVEP_0C
... MOVEP_0F
:
12982 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12983 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12984 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12985 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12989 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12992 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12996 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12997 int offset
= extract32(ctx
->opcode
, 4, 4);
12998 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
13001 case JALRC16
: /* BREAK16, SDBBP16 */
13002 switch (ctx
->opcode
& 0x3f) {
13004 case JALRC16
+ 0x20:
13006 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
13011 generate_exception(ctx
, EXCP_BREAK
);
13015 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
13016 gen_helper_do_semihosting(cpu_env
);
13018 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13019 generate_exception(ctx
, EXCP_RI
);
13021 generate_exception(ctx
, EXCP_DBp
);
13028 generate_exception(ctx
, EXCP_RI
);
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);

    if (index != 0) {
        gen_load_gpr(t1, index);
        tcg_gen_shli_tl(t1, t1, 2);
        gen_op_addr_add(ctx, t0, t1, t0);
    }

    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t1, rd);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
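
/*
 * Worked example (comment only): the indexed, scaled load above computes
 * base + (index << 2).  With the base register holding 0x1000 and the index
 * register holding 3, the generated ops form the address
 * 0x1000 + (3 << 2) = 0x100c and load a sign-extended 32-bit word from there
 * into rd.
 */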
static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
                           int base, int16_t offset)
{
    TCGv t0, t1;

    if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    switch (opc) {
    case LWP:
        if (rd == base) {
            generate_exception_end(ctx, EXCP_RI);
            return;
        }
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t1, rd+1);
        break;
    case SWP:
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        tcg_gen_movi_tl(t1, 4);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd+1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
        break;
#ifdef TARGET_MIPS64
    case LDP:
        if (rd == base) {
            generate_exception_end(ctx, EXCP_RI);
            return;
        }
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t1, rd+1);
        break;
    case SDP:
        gen_load_gpr(t1, rd);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        tcg_gen_movi_tl(t1, 8);
        gen_op_addr_add(ctx, t0, t0, t1);
        gen_load_gpr(t1, rd+1);
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
        break;
#endif
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static void gen_sync(int stype)
{
    TCGBar tcg_mo = TCG_BAR_SC;

    switch (stype) {
    case 0x4: /* SYNC_WMB */
        tcg_mo |= TCG_MO_ST_ST;
        break;
    case 0x10: /* SYNC_MB */
        tcg_mo |= TCG_MO_ALL;
        break;
    case 0x11: /* SYNC_ACQUIRE */
        tcg_mo |= TCG_MO_LD_LD | TCG_MO_LD_ST;
        break;
    case 0x12: /* SYNC_RELEASE */
        tcg_mo |= TCG_MO_ST_ST | TCG_MO_LD_ST;
        break;
    case 0x13: /* SYNC_RMB */
        tcg_mo |= TCG_MO_LD_LD;
        break;
    default:
        tcg_mo |= TCG_MO_ALL;
        break;
    }

    tcg_gen_mb(tcg_mo);
}
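
/*
 * Example (comment only): the stype field of SYNC selects how strong a
 * barrier is emitted, e.g.
 *   SYNC 0x04 (SYNC_WMB)     -> tcg_gen_mb(TCG_BAR_SC | TCG_MO_ST_ST)
 *   SYNC 0x11 (SYNC_ACQUIRE) -> tcg_gen_mb(TCG_BAR_SC | TCG_MO_LD_LD |
 *                                          TCG_MO_LD_ST)
 *   SYNC 0x00 (plain SYNC)   -> tcg_gen_mb(TCG_BAR_SC | TCG_MO_ALL)
 * so lighter guest barriers can map to weaker TCG ordering constraints.
 */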
13144 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13146 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13147 int minor
= (ctx
->opcode
>> 12) & 0xf;
13148 uint32_t mips32_op
;
13150 switch (extension
) {
13152 mips32_op
= OPC_TEQ
;
13155 mips32_op
= OPC_TGE
;
13158 mips32_op
= OPC_TGEU
;
13161 mips32_op
= OPC_TLT
;
13164 mips32_op
= OPC_TLTU
;
13167 mips32_op
= OPC_TNE
;
13169 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13171 #ifndef CONFIG_USER_ONLY
13174 check_cp0_enabled(ctx
);
13176 /* Treat as NOP. */
13179 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13183 check_cp0_enabled(ctx
);
13185 TCGv t0
= tcg_temp_new();
13187 gen_load_gpr(t0
, rt
);
13188 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13194 switch (minor
& 3) {
13196 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13199 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13202 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13205 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13208 goto pool32axf_invalid
;
13212 switch (minor
& 3) {
13214 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13217 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13220 goto pool32axf_invalid
;
13226 check_insn(ctx
, ISA_MIPS32R6
);
13227 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13230 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13233 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13236 mips32_op
= OPC_CLO
;
13239 mips32_op
= OPC_CLZ
;
13241 check_insn(ctx
, ISA_MIPS32
);
13242 gen_cl(ctx
, mips32_op
, rt
, rs
);
13245 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13246 gen_rdhwr(ctx
, rt
, rs
, 0);
13249 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13252 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13253 mips32_op
= OPC_MULT
;
13256 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13257 mips32_op
= OPC_MULTU
;
13260 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13261 mips32_op
= OPC_DIV
;
13264 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13265 mips32_op
= OPC_DIVU
;
13268 check_insn(ctx
, ISA_MIPS32
);
13269 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13272 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13273 mips32_op
= OPC_MADD
;
13276 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13277 mips32_op
= OPC_MADDU
;
13280 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13281 mips32_op
= OPC_MSUB
;
13284 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13285 mips32_op
= OPC_MSUBU
;
13287 check_insn(ctx
, ISA_MIPS32
);
13288 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13291 goto pool32axf_invalid
;
13302 generate_exception_err(ctx
, EXCP_CpU
, 2);
13305 goto pool32axf_invalid
;
13310 case JALR
: /* JALRC */
13311 case JALR_HB
: /* JALRC_HB */
13312 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13313 /* JALRC, JALRC_HB */
13314 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13316 /* JALR, JALR_HB */
13317 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13318 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13323 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13324 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13325 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13328 goto pool32axf_invalid
;
13334 check_cp0_enabled(ctx
);
13335 check_insn(ctx
, ISA_MIPS32R2
);
13336 gen_load_srsgpr(rs
, rt
);
13339 check_cp0_enabled(ctx
);
13340 check_insn(ctx
, ISA_MIPS32R2
);
13341 gen_store_srsgpr(rs
, rt
);
13344 goto pool32axf_invalid
;
13347 #ifndef CONFIG_USER_ONLY
13351 mips32_op
= OPC_TLBP
;
13354 mips32_op
= OPC_TLBR
;
13357 mips32_op
= OPC_TLBWI
;
13360 mips32_op
= OPC_TLBWR
;
13363 mips32_op
= OPC_TLBINV
;
13366 mips32_op
= OPC_TLBINVF
;
13369 mips32_op
= OPC_WAIT
;
13372 mips32_op
= OPC_DERET
;
13375 mips32_op
= OPC_ERET
;
13377 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13380 goto pool32axf_invalid
;
13386 check_cp0_enabled(ctx
);
13388 TCGv t0
= tcg_temp_new();
13390 save_cpu_state(ctx
, 1);
13391 gen_helper_di(t0
, cpu_env
);
13392 gen_store_gpr(t0
, rs
);
13393 /* Stop translation as we may have switched the execution mode */
13394 ctx
->bstate
= BS_STOP
;
13399 check_cp0_enabled(ctx
);
13401 TCGv t0
= tcg_temp_new();
13403 save_cpu_state(ctx
, 1);
13404 gen_helper_ei(t0
, cpu_env
);
13405 gen_store_gpr(t0
, rs
);
13406 /* Stop translation as we may have switched the execution mode */
13407 ctx
->bstate
= BS_STOP
;
13412 goto pool32axf_invalid
;
13419 gen_sync(extract32(ctx
->opcode
, 16, 5));
13422 generate_exception_end(ctx
, EXCP_SYSCALL
);
13425 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13426 gen_helper_do_semihosting(cpu_env
);
13428 check_insn(ctx
, ISA_MIPS32
);
13429 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13430 generate_exception_end(ctx
, EXCP_RI
);
13432 generate_exception_end(ctx
, EXCP_DBp
);
13437 goto pool32axf_invalid
;
13441 switch (minor
& 3) {
13443 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13446 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13449 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13452 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13455 goto pool32axf_invalid
;
13459 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13462 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13465 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13468 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13471 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13474 goto pool32axf_invalid
;
13479 MIPS_INVAL("pool32axf");
13480 generate_exception_end(ctx
, EXCP_RI
);
13485 /* Values for microMIPS fmt field. Variable-width, depending on which
13486 formats the instruction supports. */
13505 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13507 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13508 uint32_t mips32_op
;
13510 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13511 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13512 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
    switch (extension) {
    case FLOAT_1BIT_FMT(CFC1, 0):
        mips32_op = OPC_CFC1;
    case FLOAT_1BIT_FMT(CTC1, 0):
        mips32_op = OPC_CTC1;
    case FLOAT_1BIT_FMT(MFC1, 0):
        mips32_op = OPC_MFC1;
    case FLOAT_1BIT_FMT(MTC1, 0):
        mips32_op = OPC_MTC1;
    case FLOAT_1BIT_FMT(MFHC1, 0):
        mips32_op = OPC_MFHC1;
    case FLOAT_1BIT_FMT(MTHC1, 0):
        mips32_op = OPC_MTHC1;
        gen_cp1(ctx, mips32_op, rt, rs);

        /* Reciprocal square root */
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_S):
        mips32_op = OPC_RSQRT_S;
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D):
        mips32_op = OPC_RSQRT_D;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_S):
        mips32_op = OPC_SQRT_S;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_D):
        mips32_op = OPC_SQRT_D;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_S):
        mips32_op = OPC_RECIP_S;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_D):
        mips32_op = OPC_RECIP_D;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_S):
        mips32_op = OPC_FLOOR_L_S;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_D):
        mips32_op = OPC_FLOOR_L_D;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_S):
        mips32_op = OPC_FLOOR_W_S;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_D):
        mips32_op = OPC_FLOOR_W_D;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_S):
        mips32_op = OPC_CEIL_L_S;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_D):
        mips32_op = OPC_CEIL_L_D;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_S):
        mips32_op = OPC_CEIL_W_S;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_D):
        mips32_op = OPC_CEIL_W_D;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_S):
        mips32_op = OPC_TRUNC_L_S;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_D):
        mips32_op = OPC_TRUNC_L_D;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_S):
        mips32_op = OPC_TRUNC_W_S;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_D):
        mips32_op = OPC_TRUNC_W_D;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_S):
        mips32_op = OPC_ROUND_L_S;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_D):
        mips32_op = OPC_ROUND_L_D;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_S):
        mips32_op = OPC_ROUND_W_S;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_D):
        mips32_op = OPC_ROUND_W_D;

        /* Integer to floating-point conversion */
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_S):
        mips32_op = OPC_CVT_L_S;
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_D):
        mips32_op = OPC_CVT_L_D;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_S):
        mips32_op = OPC_CVT_W_S;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_D):
        mips32_op = OPC_CVT_W_D;

        /* Paired-foo conversions */
    case FLOAT_1BIT_FMT(CVT_S_PL, 0):
        mips32_op = OPC_CVT_S_PL;
    case FLOAT_1BIT_FMT(CVT_S_PU, 0):
        mips32_op = OPC_CVT_S_PU;
    case FLOAT_1BIT_FMT(CVT_PW_PS, 0):
        mips32_op = OPC_CVT_PW_PS;
    case FLOAT_1BIT_FMT(CVT_PS_PW, 0):
        mips32_op = OPC_CVT_PS_PW;

        /* Floating-point moves */
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_S):
        mips32_op = OPC_MOV_S;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_D):
        mips32_op = OPC_MOV_D;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_PS):
        mips32_op = OPC_MOV_PS;

        /* Absolute value */
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_S):
        mips32_op = OPC_ABS_S;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_D):
        mips32_op = OPC_ABS_D;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_PS):
        mips32_op = OPC_ABS_PS;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_S):
        mips32_op = OPC_NEG_S;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_D):
        mips32_op = OPC_NEG_D;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_PS):
        mips32_op = OPC_NEG_PS;

        /* Reciprocal square root step */
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RSQRT1_S;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RSQRT1_D;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RSQRT1_PS;

        /* Reciprocal step */
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RECIP1_S;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RECIP1_D;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RECIP1_PS;

        /* Conversions from double */
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_S):
        mips32_op = OPC_CVT_D_S;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_W):
        mips32_op = OPC_CVT_D_W;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_L):
        mips32_op = OPC_CVT_D_L;

        /* Conversions from single */
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_D):
        mips32_op = OPC_CVT_S_D;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_W):
        mips32_op = OPC_CVT_S_W;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_L):
        mips32_op = OPC_CVT_S_L;
        gen_farith(ctx, mips32_op, -1, rs, rt, 0);

        /* Conditional moves on floating-point codes */
    case COND_FLOAT_MOV(MOVT, 0):
    case COND_FLOAT_MOV(MOVT, 1):
    case COND_FLOAT_MOV(MOVT, 2):
    case COND_FLOAT_MOV(MOVT, 3):
    case COND_FLOAT_MOV(MOVT, 4):
    case COND_FLOAT_MOV(MOVT, 5):
    case COND_FLOAT_MOV(MOVT, 6):
    case COND_FLOAT_MOV(MOVT, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
        break;
    case COND_FLOAT_MOV(MOVF, 0):
    case COND_FLOAT_MOV(MOVF, 1):
    case COND_FLOAT_MOV(MOVF, 2):
    case COND_FLOAT_MOV(MOVF, 3):
    case COND_FLOAT_MOV(MOVF, 4):
    case COND_FLOAT_MOV(MOVF, 5):
    case COND_FLOAT_MOV(MOVF, 6):
    case COND_FLOAT_MOV(MOVF, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);
        break;
    default:
        MIPS_INVAL("pool32fxf");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
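/*
 * Decode the second half of a 32-bit microMIPS instruction: fetch the low
 * halfword at pc + 2, merge it into ctx->opcode, extract the common
 * register and immediate fields, then dispatch on the major opcode.
 */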
static void decode_micromips32_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int rt, rs, rd, rr;
    uint32_t op, minor, mips32_op;
    uint32_t cond, fmt, cc;

    insn = cpu_lduw_code(env, ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;

        minor = ctx->opcode & 0x3f;
13777 minor
= (ctx
->opcode
>> 6) & 0xf;
13780 mips32_op
= OPC_SLL
;
13783 mips32_op
= OPC_SRA
;
13786 mips32_op
= OPC_SRL
;
13789 mips32_op
= OPC_ROTR
;
13791 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13794 check_insn(ctx
, ISA_MIPS32R6
);
13795 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13798 check_insn(ctx
, ISA_MIPS32R6
);
13799 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13802 check_insn(ctx
, ISA_MIPS32R6
);
13803 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13806 goto pool32a_invalid
;
13810 minor
= (ctx
->opcode
>> 6) & 0xf;
13814 mips32_op
= OPC_ADD
;
13817 mips32_op
= OPC_ADDU
;
13820 mips32_op
= OPC_SUB
;
13823 mips32_op
= OPC_SUBU
;
13826 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13827 mips32_op
= OPC_MUL
;
13829 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13833 mips32_op
= OPC_SLLV
;
13836 mips32_op
= OPC_SRLV
;
13839 mips32_op
= OPC_SRAV
;
13842 mips32_op
= OPC_ROTRV
;
13844 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13846 /* Logical operations */
13848 mips32_op
= OPC_AND
;
13851 mips32_op
= OPC_OR
;
13854 mips32_op
= OPC_NOR
;
13857 mips32_op
= OPC_XOR
;
13859 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13861 /* Set less than */
13863 mips32_op
= OPC_SLT
;
13866 mips32_op
= OPC_SLTU
;
13868 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13871 goto pool32a_invalid
;
13875 minor
= (ctx
->opcode
>> 6) & 0xf;
13877 /* Conditional moves */
13878 case MOVN
: /* MUL */
13879 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13881 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13884 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13887 case MOVZ
: /* MUH */
13888 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13890 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13893 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13897 check_insn(ctx
, ISA_MIPS32R6
);
13898 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13901 check_insn(ctx
, ISA_MIPS32R6
);
13902 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13904 case LWXS
: /* DIV */
13905 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13907 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13910 gen_ldxs(ctx
, rs
, rt
, rd
);
13914 check_insn(ctx
, ISA_MIPS32R6
);
13915 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13918 check_insn(ctx
, ISA_MIPS32R6
);
13919 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13922 check_insn(ctx
, ISA_MIPS32R6
);
13923 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13926 goto pool32a_invalid
;
13930 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13933 check_insn(ctx
, ISA_MIPS32R6
);
13934 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13935 extract32(ctx
->opcode
, 9, 2));
13938 check_insn(ctx
, ISA_MIPS32R6
);
13939 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13940 extract32(ctx
->opcode
, 9, 2));
13943 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
13946 gen_pool32axf(env
, ctx
, rt
, rs
);
13949 generate_exception_end(ctx
, EXCP_BREAK
);
13952 check_insn(ctx
, ISA_MIPS32R6
);
13953 generate_exception_end(ctx
, EXCP_RI
);
13957 MIPS_INVAL("pool32a");
13958 generate_exception_end(ctx
, EXCP_RI
);
13963 minor
= (ctx
->opcode
>> 12) & 0xf;
13966 check_cp0_enabled(ctx
);
13967 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
13968 gen_cache_operation(ctx
, rt
, rs
, imm
);
13973 /* COP2: Not implemented. */
13974 generate_exception_err(ctx
, EXCP_CpU
, 2);
13976 #ifdef TARGET_MIPS64
13979 check_insn(ctx
, ISA_MIPS3
);
13980 check_mips_64(ctx
);
13985 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13987 #ifdef TARGET_MIPS64
13990 check_insn(ctx
, ISA_MIPS3
);
13991 check_mips_64(ctx
);
13996 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13999 MIPS_INVAL("pool32b");
14000 generate_exception_end(ctx
, EXCP_RI
);
14005 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
14006 minor
= ctx
->opcode
& 0x3f;
14007 check_cp1_enabled(ctx
);
14010 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14011 mips32_op
= OPC_ALNV_PS
;
14014 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14015 mips32_op
= OPC_MADD_S
;
14018 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14019 mips32_op
= OPC_MADD_D
;
14022 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14023 mips32_op
= OPC_MADD_PS
;
14026 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14027 mips32_op
= OPC_MSUB_S
;
14030 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14031 mips32_op
= OPC_MSUB_D
;
14034 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14035 mips32_op
= OPC_MSUB_PS
;
14038 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14039 mips32_op
= OPC_NMADD_S
;
14042 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14043 mips32_op
= OPC_NMADD_D
;
14046 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14047 mips32_op
= OPC_NMADD_PS
;
14050 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14051 mips32_op
= OPC_NMSUB_S
;
14054 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14055 mips32_op
= OPC_NMSUB_D
;
14058 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14059 mips32_op
= OPC_NMSUB_PS
;
14061 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
14063 case CABS_COND_FMT
:
14064 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14065 cond
= (ctx
->opcode
>> 6) & 0xf;
14066 cc
= (ctx
->opcode
>> 13) & 0x7;
14067 fmt
= (ctx
->opcode
>> 10) & 0x3;
14070 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
14073 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
14076 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
14079 goto pool32f_invalid
;
14083 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14084 cond
= (ctx
->opcode
>> 6) & 0xf;
14085 cc
= (ctx
->opcode
>> 13) & 0x7;
14086 fmt
= (ctx
->opcode
>> 10) & 0x3;
14089 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
14092 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
14095 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
14098 goto pool32f_invalid
;
14102 check_insn(ctx
, ISA_MIPS32R6
);
14103 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14106 check_insn(ctx
, ISA_MIPS32R6
);
14107 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14110 gen_pool32fxf(ctx
, rt
, rs
);
14114 switch ((ctx
->opcode
>> 6) & 0x7) {
14116 mips32_op
= OPC_PLL_PS
;
14119 mips32_op
= OPC_PLU_PS
;
14122 mips32_op
= OPC_PUL_PS
;
14125 mips32_op
= OPC_PUU_PS
;
14128 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14129 mips32_op
= OPC_CVT_PS_S
;
14131 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14134 goto pool32f_invalid
;
14138 check_insn(ctx
, ISA_MIPS32R6
);
14139 switch ((ctx
->opcode
>> 9) & 0x3) {
14141 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14144 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14147 goto pool32f_invalid
;
14152 switch ((ctx
->opcode
>> 6) & 0x7) {
14154 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14155 mips32_op
= OPC_LWXC1
;
14158 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14159 mips32_op
= OPC_SWXC1
;
14162 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14163 mips32_op
= OPC_LDXC1
;
14166 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14167 mips32_op
= OPC_SDXC1
;
14170 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14171 mips32_op
= OPC_LUXC1
;
14174 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14175 mips32_op
= OPC_SUXC1
;
14177 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14180 goto pool32f_invalid
;
14184 check_insn(ctx
, ISA_MIPS32R6
);
14185 switch ((ctx
->opcode
>> 9) & 0x3) {
14187 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14190 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14193 goto pool32f_invalid
;
14198 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14199 fmt
= (ctx
->opcode
>> 9) & 0x3;
14200 switch ((ctx
->opcode
>> 6) & 0x7) {
14204 mips32_op
= OPC_RSQRT2_S
;
14207 mips32_op
= OPC_RSQRT2_D
;
14210 mips32_op
= OPC_RSQRT2_PS
;
14213 goto pool32f_invalid
;
14219 mips32_op
= OPC_RECIP2_S
;
14222 mips32_op
= OPC_RECIP2_D
;
14225 mips32_op
= OPC_RECIP2_PS
;
14228 goto pool32f_invalid
;
14232 mips32_op
= OPC_ADDR_PS
;
14235 mips32_op
= OPC_MULR_PS
;
14237 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14240 goto pool32f_invalid
;
14244 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14245 cc
= (ctx
->opcode
>> 13) & 0x7;
14246 fmt
= (ctx
->opcode
>> 9) & 0x3;
14247 switch ((ctx
->opcode
>> 6) & 0x7) {
14248 case MOVF_FMT
: /* RINT_FMT */
14249 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14253 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14256 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14259 goto pool32f_invalid
;
14265 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14268 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14272 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14275 goto pool32f_invalid
;
14279 case MOVT_FMT
: /* CLASS_FMT */
14280 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14284 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14287 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14290 goto pool32f_invalid
;
14296 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14299 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14303 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14306 goto pool32f_invalid
;
14311 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14314 goto pool32f_invalid
;
#define FINSN_3ARG_SDPS(prfx)                           \
    switch ((ctx->opcode >> 8) & 0x3) {                 \
        mips32_op = OPC_##prfx##_S;                     \
        mips32_op = OPC_##prfx##_D;                     \
    case FMT_SDPS_PS:                                   \
        mips32_op = OPC_##prfx##_PS;                    \
        goto pool32f_invalid;                           \
14333 check_insn(ctx
, ISA_MIPS32R6
);
14334 switch ((ctx
->opcode
>> 9) & 0x3) {
14336 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14339 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14342 goto pool32f_invalid
;
14346 check_insn(ctx
, ISA_MIPS32R6
);
14347 switch ((ctx
->opcode
>> 9) & 0x3) {
14349 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14352 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14355 goto pool32f_invalid
;
14359 /* regular FP ops */
14360 switch ((ctx
->opcode
>> 6) & 0x3) {
14362 FINSN_3ARG_SDPS(ADD
);
14365 FINSN_3ARG_SDPS(SUB
);
14368 FINSN_3ARG_SDPS(MUL
);
14371 fmt
= (ctx
->opcode
>> 8) & 0x3;
14373 mips32_op
= OPC_DIV_D
;
14374 } else if (fmt
== 0) {
14375 mips32_op
= OPC_DIV_S
;
14377 goto pool32f_invalid
;
14381 goto pool32f_invalid
;
14386 switch ((ctx
->opcode
>> 6) & 0x7) {
14387 case MOVN_FMT
: /* SELNEZ_FMT */
14388 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14390 switch ((ctx
->opcode
>> 9) & 0x3) {
14392 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14395 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14398 goto pool32f_invalid
;
14402 FINSN_3ARG_SDPS(MOVN
);
14406 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14407 FINSN_3ARG_SDPS(MOVN
);
14409 case MOVZ_FMT
: /* SELEQZ_FMT */
14410 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14412 switch ((ctx
->opcode
>> 9) & 0x3) {
14414 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14417 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14420 goto pool32f_invalid
;
14424 FINSN_3ARG_SDPS(MOVZ
);
14428 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14429 FINSN_3ARG_SDPS(MOVZ
);
14432 check_insn(ctx
, ISA_MIPS32R6
);
14433 switch ((ctx
->opcode
>> 9) & 0x3) {
14435 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14438 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14441 goto pool32f_invalid
;
14445 check_insn(ctx
, ISA_MIPS32R6
);
14446 switch ((ctx
->opcode
>> 9) & 0x3) {
14448 mips32_op
= OPC_MADDF_S
;
14451 mips32_op
= OPC_MADDF_D
;
14454 goto pool32f_invalid
;
14458 check_insn(ctx
, ISA_MIPS32R6
);
14459 switch ((ctx
->opcode
>> 9) & 0x3) {
14461 mips32_op
= OPC_MSUBF_S
;
14464 mips32_op
= OPC_MSUBF_D
;
14467 goto pool32f_invalid
;
14471 goto pool32f_invalid
;
14475 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14479 MIPS_INVAL("pool32f");
14480 generate_exception_end(ctx
, EXCP_RI
);
14484 generate_exception_err(ctx
, EXCP_CpU
, 1);
14488 minor
= (ctx
->opcode
>> 21) & 0x1f;
14491 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14492 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14495 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14496 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14497 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14500 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14501 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14502 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14505 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14506 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14509 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14510 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14511 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14514 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14515 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14516 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14519 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14520 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14523 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14524 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14528 case TLTI
: /* BC1EQZC */
14529 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14531 check_cp1_enabled(ctx
);
14532 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14535 mips32_op
= OPC_TLTI
;
14539 case TGEI
: /* BC1NEZC */
14540 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14542 check_cp1_enabled(ctx
);
14543 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14546 mips32_op
= OPC_TGEI
;
14551 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14552 mips32_op
= OPC_TLTIU
;
14555 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14556 mips32_op
= OPC_TGEIU
;
14558 case TNEI
: /* SYNCI */
14559 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
                /* Break the TB to be able to sync copied instructions
                   immediately */
                ctx->bstate = BS_STOP;
14566 mips32_op
= OPC_TNEI
;
14571 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14572 mips32_op
= OPC_TEQI
;
14574 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14579 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14580 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14581 4, rs
, 0, imm
<< 1, 0);
            /* Compact branches don't have a delay slot, so just let
               the normal delay slot handling take us to the branch
               target. */
14587 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14588 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14591 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
14598 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14599 /* COP2: Not implemented. */
14600 generate_exception_err(ctx
, EXCP_CpU
, 2);
14603 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14604 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14607 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14608 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14611 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14612 mips32_op
= OPC_BC1FANY4
;
14615 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14616 mips32_op
= OPC_BC1TANY4
;
14619 check_insn(ctx
, ASE_MIPS3D
);
14622 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14623 check_cp1_enabled(ctx
);
14624 gen_compute_branch1(ctx
, mips32_op
,
14625 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14627 generate_exception_err(ctx
, EXCP_CpU
, 1);
14632 /* MIPS DSP: not implemented */
14635 MIPS_INVAL("pool32i");
14636 generate_exception_end(ctx
, EXCP_RI
);
14641 minor
= (ctx
->opcode
>> 12) & 0xf;
14642 offset
= sextract32(ctx
->opcode
, 0,
14643 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14646 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14647 mips32_op
= OPC_LWL
;
14650 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14651 mips32_op
= OPC_SWL
;
14654 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14655 mips32_op
= OPC_LWR
;
14658 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14659 mips32_op
= OPC_SWR
;
14661 #if defined(TARGET_MIPS64)
14663 check_insn(ctx
, ISA_MIPS3
);
14664 check_mips_64(ctx
);
14665 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14666 mips32_op
= OPC_LDL
;
14669 check_insn(ctx
, ISA_MIPS3
);
14670 check_mips_64(ctx
);
14671 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14672 mips32_op
= OPC_SDL
;
14675 check_insn(ctx
, ISA_MIPS3
);
14676 check_mips_64(ctx
);
14677 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14678 mips32_op
= OPC_LDR
;
14681 check_insn(ctx
, ISA_MIPS3
);
14682 check_mips_64(ctx
);
14683 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14684 mips32_op
= OPC_SDR
;
14687 check_insn(ctx
, ISA_MIPS3
);
14688 check_mips_64(ctx
);
14689 mips32_op
= OPC_LWU
;
14692 check_insn(ctx
, ISA_MIPS3
);
14693 check_mips_64(ctx
);
14694 mips32_op
= OPC_LLD
;
14698 mips32_op
= OPC_LL
;
14701 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14704 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14707 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14709 #if defined(TARGET_MIPS64)
14711 check_insn(ctx
, ISA_MIPS3
);
14712 check_mips_64(ctx
);
14713 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14717 /* Treat as no-op */
14718 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14719 /* hint codes 24-31 are reserved and signal RI */
14720 generate_exception(ctx
, EXCP_RI
);
14724 MIPS_INVAL("pool32c");
14725 generate_exception_end(ctx
, EXCP_RI
);
14729 case ADDI32
: /* AUI, LUI */
14730 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14732 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14735 mips32_op
= OPC_ADDI
;
14740 mips32_op
= OPC_ADDIU
;
14742 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14745 /* Logical operations */
14747 mips32_op
= OPC_ORI
;
14750 mips32_op
= OPC_XORI
;
14753 mips32_op
= OPC_ANDI
;
14755 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14758 /* Set less than immediate */
14760 mips32_op
= OPC_SLTI
;
14763 mips32_op
= OPC_SLTIU
;
14765 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14768 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14769 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14770 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14771 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14773 case JALS32
: /* BOVC, BEQC, BEQZALC */
14774 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14777 mips32_op
= OPC_BOVC
;
14778 } else if (rs
< rt
&& rs
== 0) {
14780 mips32_op
= OPC_BEQZALC
;
14783 mips32_op
= OPC_BEQC
;
14785 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14788 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14789 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14790 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14793 case BEQ32
: /* BC */
14794 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14796 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14797 sextract32(ctx
->opcode
<< 1, 0, 27));
14800 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14803 case BNE32
: /* BALC */
14804 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14806 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14807 sextract32(ctx
->opcode
<< 1, 0, 27));
14810 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14813 case J32
: /* BGTZC, BLTZC, BLTC */
14814 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14815 if (rs
== 0 && rt
!= 0) {
14817 mips32_op
= OPC_BGTZC
;
14818 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14820 mips32_op
= OPC_BLTZC
;
14823 mips32_op
= OPC_BLTC
;
14825 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14828 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14829 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14832 case JAL32
: /* BLEZC, BGEZC, BGEC */
14833 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14834 if (rs
== 0 && rt
!= 0) {
14836 mips32_op
= OPC_BLEZC
;
14837 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14839 mips32_op
= OPC_BGEZC
;
14842 mips32_op
= OPC_BGEC
;
14844 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14847 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14848 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14849 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14852 /* Floating point (COP1) */
14854 mips32_op
= OPC_LWC1
;
14857 mips32_op
= OPC_LDC1
;
14860 mips32_op
= OPC_SWC1
;
14863 mips32_op
= OPC_SDC1
;
14865 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14867 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14868 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14869 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14870 switch ((ctx
->opcode
>> 16) & 0x1f) {
14871 case ADDIUPC_00
... ADDIUPC_07
:
14872 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14875 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14878 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14880 case LWPC_08
... LWPC_0F
:
14881 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14884 generate_exception(ctx
, EXCP_RI
);
14889 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14890 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14892 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14895 case BNVC
: /* BNEC, BNEZALC */
14896 check_insn(ctx
, ISA_MIPS32R6
);
14899 mips32_op
= OPC_BNVC
;
14900 } else if (rs
< rt
&& rs
== 0) {
14902 mips32_op
= OPC_BNEZALC
;
14905 mips32_op
= OPC_BNEC
;
14907 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14909 case R6_BNEZC
: /* JIALC */
14910 check_insn(ctx
, ISA_MIPS32R6
);
14913 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14914 sextract32(ctx
->opcode
<< 1, 0, 22));
14917 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14920 case R6_BEQZC
: /* JIC */
14921 check_insn(ctx
, ISA_MIPS32R6
);
14924 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14925 sextract32(ctx
->opcode
<< 1, 0, 22));
14928 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14931 case BLEZALC
: /* BGEZALC, BGEUC */
14932 check_insn(ctx
, ISA_MIPS32R6
);
14933 if (rs
== 0 && rt
!= 0) {
14935 mips32_op
= OPC_BLEZALC
;
14936 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14938 mips32_op
= OPC_BGEZALC
;
14941 mips32_op
= OPC_BGEUC
;
14943 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14945 case BGTZALC
: /* BLTZALC, BLTUC */
14946 check_insn(ctx
, ISA_MIPS32R6
);
14947 if (rs
== 0 && rt
!= 0) {
14949 mips32_op
= OPC_BGTZALC
;
14950 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14952 mips32_op
= OPC_BLTZALC
;
14955 mips32_op
= OPC_BLTUC
;
14957 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14959 /* Loads and stores */
14961 mips32_op
= OPC_LB
;
14964 mips32_op
= OPC_LBU
;
14967 mips32_op
= OPC_LH
;
14970 mips32_op
= OPC_LHU
;
14973 mips32_op
= OPC_LW
;
14975 #ifdef TARGET_MIPS64
14977 check_insn(ctx
, ISA_MIPS3
);
14978 check_mips_64(ctx
);
14979 mips32_op
= OPC_LD
;
14982 check_insn(ctx
, ISA_MIPS3
);
14983 check_mips_64(ctx
);
14984 mips32_op
= OPC_SD
;
14988 mips32_op
= OPC_SB
;
14991 mips32_op
= OPC_SH
;
14994 mips32_op
= OPC_SW
;
14997 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
15000 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
15003 generate_exception_end(ctx
, EXCP_RI
);
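/*
 * Top-level microMIPS decoder: instructions must be halfword aligned, and
 * when executing in a delay slot the instruction size recorded by the
 * branch (16-bit or 32-bit) is enforced before the 16-bit formats are
 * decoded.
 */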
static int decode_micromips_opc(CPUMIPSState *env, DisasContext *ctx)
{
    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);
    }

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
            /* POOL32A, POOL32B, POOL32I, POOL32C */
            /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
            /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
            /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
            /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
            }
            /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
            /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
            /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
            }
15055 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15056 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15057 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15060 switch (ctx
->opcode
& 0x1) {
15068 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15069 /* In the Release 6 the register number location in
15070 * the instruction encoding has changed.
15072 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15074 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
15080 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15081 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15082 int amount
= (ctx
->opcode
>> 1) & 0x7;
15084 amount
= amount
== 0 ? 8 : amount
;
15086 switch (ctx
->opcode
& 0x1) {
15095 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15099 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15100 gen_pool16c_r6_insn(ctx
);
15102 gen_pool16c_insn(ctx
);
15107 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15108 int rb
= 28; /* GP */
15109 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15111 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15115 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15116 if (ctx
->opcode
& 1) {
15117 generate_exception_end(ctx
, EXCP_RI
);
15120 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15121 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15122 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15123 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15128 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15129 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15130 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15131 offset
= (offset
== 0xf ? -1 : offset
);
15133 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15138 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15139 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15140 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15142 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15147 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15148 int rb
= 29; /* SP */
15149 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15151 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15156 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15157 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15158 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15160 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15165 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15166 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15167 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15169 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15174 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15175 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15176 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15178 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15183 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15184 int rb
= 29; /* SP */
15185 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15187 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15192 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15193 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15194 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15196 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15201 int rd
= uMIPS_RD5(ctx
->opcode
);
15202 int rs
= uMIPS_RS5(ctx
->opcode
);
15204 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15211 switch (ctx
->opcode
& 0x1) {
15221 switch (ctx
->opcode
& 0x1) {
15226 gen_addiur1sp(ctx
);
15230 case B16
: /* BC16 */
15231 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15232 sextract32(ctx
->opcode
, 0, 10) << 1,
15233 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15235 case BNEZ16
: /* BNEZC16 */
15236 case BEQZ16
: /* BEQZC16 */
15237 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15238 mmreg(uMIPS_RD(ctx
->opcode
)),
15239 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15240 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15245 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15246 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15248 imm
= (imm
== 0x7f ? -1 : imm
);
15249 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15255 generate_exception_end(ctx
, EXCP_RI
);
15258 decode_micromips32_opc(env
, ctx
);
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */

#endif

/* MIPSDSP functions. */
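/*
 * DSP indexed load: the effective address is the sum of two GPRs (the base
 * and offset register numbers); when either register number is zero, only
 * the other register contributes to the address.
 */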
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
{
    TCGv t0;

    t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
    }

    switch (opc) {
    case OPC_LBUX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        break;
    case OPC_LHX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        break;
    case OPC_LWX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
        break;
#if defined(TARGET_MIPS64)
    case OPC_LDX:
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
        break;
#endif
    }
    tcg_temp_free(t0);
}
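/*
 * DSP arithmetic (ADDU.QB / ABSQ_S.PH class): the operands are read into
 * temporaries and each sub-opcode maps onto a dedicated TCG helper that
 * writes its result to cpu_gpr[ret]; the saturating variants also take
 * cpu_env so the helper can update DSPControl.
 */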
static void gen_mipsdsp_arith(DisasContext *ctx, uint32_t op1, uint32_t op2,
                              int ret, int v1, int v2)
{
    TCGv v1_t;
    TCGv v2_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
15331 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15332 case OPC_MULT_G_2E
:
15336 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15338 case OPC_ADDUH_R_QB
:
15339 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15342 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15344 case OPC_ADDQH_R_PH
:
15345 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15348 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15350 case OPC_ADDQH_R_W
:
15351 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15354 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15356 case OPC_SUBUH_R_QB
:
15357 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15360 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15362 case OPC_SUBQH_R_PH
:
15363 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15366 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15368 case OPC_SUBQH_R_W
:
15369 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15373 case OPC_ABSQ_S_PH_DSP
:
15375 case OPC_ABSQ_S_QB
:
15377 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15379 case OPC_ABSQ_S_PH
:
15381 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15385 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15387 case OPC_PRECEQ_W_PHL
:
15389 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15390 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15392 case OPC_PRECEQ_W_PHR
:
15394 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15395 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15396 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15398 case OPC_PRECEQU_PH_QBL
:
15400 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15402 case OPC_PRECEQU_PH_QBR
:
15404 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15406 case OPC_PRECEQU_PH_QBLA
:
15408 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15410 case OPC_PRECEQU_PH_QBRA
:
15412 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15414 case OPC_PRECEU_PH_QBL
:
15416 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15418 case OPC_PRECEU_PH_QBR
:
15420 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15422 case OPC_PRECEU_PH_QBLA
:
15424 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15426 case OPC_PRECEU_PH_QBRA
:
15428 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15432 case OPC_ADDU_QB_DSP
:
15436 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15438 case OPC_ADDQ_S_PH
:
15440 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15444 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15448 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15450 case OPC_ADDU_S_QB
:
15452 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15456 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15458 case OPC_ADDU_S_PH
:
15460 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15464 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15466 case OPC_SUBQ_S_PH
:
15468 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15472 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15476 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15478 case OPC_SUBU_S_QB
:
15480 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15484 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15486 case OPC_SUBU_S_PH
:
15488 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15492 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15496 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15500 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15502 case OPC_RADDU_W_QB
:
15504 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15508 case OPC_CMPU_EQ_QB_DSP
:
15510 case OPC_PRECR_QB_PH
:
15512 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15514 case OPC_PRECRQ_QB_PH
:
15516 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15518 case OPC_PRECR_SRA_PH_W
:
15521 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15522 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15524 tcg_temp_free_i32(sa_t
);
15527 case OPC_PRECR_SRA_R_PH_W
:
15530 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15531 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15533 tcg_temp_free_i32(sa_t
);
15536 case OPC_PRECRQ_PH_W
:
15538 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15540 case OPC_PRECRQ_RS_PH_W
:
15542 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15544 case OPC_PRECRQU_S_QB_PH
:
15546 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15550 #ifdef TARGET_MIPS64
15551 case OPC_ABSQ_S_QH_DSP
:
15553 case OPC_PRECEQ_L_PWL
:
15555 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15557 case OPC_PRECEQ_L_PWR
:
15559 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15561 case OPC_PRECEQ_PW_QHL
:
15563 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15565 case OPC_PRECEQ_PW_QHR
:
15567 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15569 case OPC_PRECEQ_PW_QHLA
:
15571 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15573 case OPC_PRECEQ_PW_QHRA
:
15575 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15577 case OPC_PRECEQU_QH_OBL
:
15579 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15581 case OPC_PRECEQU_QH_OBR
:
15583 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15585 case OPC_PRECEQU_QH_OBLA
:
15587 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15589 case OPC_PRECEQU_QH_OBRA
:
15591 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15593 case OPC_PRECEU_QH_OBL
:
15595 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15597 case OPC_PRECEU_QH_OBR
:
15599 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15601 case OPC_PRECEU_QH_OBLA
:
15603 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15605 case OPC_PRECEU_QH_OBRA
:
15607 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15609 case OPC_ABSQ_S_OB
:
15611 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15613 case OPC_ABSQ_S_PW
:
15615 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15617 case OPC_ABSQ_S_QH
:
15619 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15623 case OPC_ADDU_OB_DSP
:
15625 case OPC_RADDU_L_OB
:
15627 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15631 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15633 case OPC_SUBQ_S_PW
:
15635 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15639 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15641 case OPC_SUBQ_S_QH
:
15643 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15647 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15649 case OPC_SUBU_S_OB
:
15651 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15655 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15657 case OPC_SUBU_S_QH
:
15659 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15663 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15665 case OPC_SUBUH_R_OB
:
15667 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15671 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15673 case OPC_ADDQ_S_PW
:
15675 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15679 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15681 case OPC_ADDQ_S_QH
:
15683 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15687 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15689 case OPC_ADDU_S_OB
:
15691 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15695 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15697 case OPC_ADDU_S_QH
:
15699 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15703 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15705 case OPC_ADDUH_R_OB
:
15707 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15711 case OPC_CMPU_EQ_OB_DSP
:
15713 case OPC_PRECR_OB_QH
:
15715 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15717 case OPC_PRECR_SRA_QH_PW
:
15720 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15721 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15722 tcg_temp_free_i32(ret_t
);
15725 case OPC_PRECR_SRA_R_QH_PW
:
15728 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15729 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15730 tcg_temp_free_i32(sa_v
);
15733 case OPC_PRECRQ_OB_QH
:
15735 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15737 case OPC_PRECRQ_PW_L
:
15739 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15741 case OPC_PRECRQ_QH_PW
:
15743 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15745 case OPC_PRECRQ_RS_QH_PW
:
15747 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15749 case OPC_PRECRQU_S_OB_QH
:
15751 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15758 tcg_temp_free(v1_t
);
15759 tcg_temp_free(v2_t
);
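/*
 * DSP shifts (SHLL.QB class): the shift amount is either the immediate v1,
 * moved into t0, or the contents of register v1 for the variable "V"
 * forms; each sub-opcode calls the matching TCG helper.
 */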
static void gen_mipsdsp_shift(DisasContext *ctx, uint32_t opc,
                              int ret, int v1, int v2)
{
    uint32_t op2;
    TCGv t0;
    TCGv v1_t;
    TCGv v2_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_tl(t0, v1);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
15784 case OPC_SHLL_QB_DSP
:
15786 op2
= MASK_SHLL_QB(ctx
->opcode
);
15790 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15794 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15798 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15802 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15804 case OPC_SHLL_S_PH
:
15806 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15808 case OPC_SHLLV_S_PH
:
15810 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15814 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15816 case OPC_SHLLV_S_W
:
15818 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15822 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15826 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15830 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15834 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15838 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15840 case OPC_SHRA_R_QB
:
15842 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15846 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15848 case OPC_SHRAV_R_QB
:
15850 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15854 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15856 case OPC_SHRA_R_PH
:
15858 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15862 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15864 case OPC_SHRAV_R_PH
:
15866 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15870 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15872 case OPC_SHRAV_R_W
:
15874 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15876 default: /* Invalid */
15877 MIPS_INVAL("MASK SHLL.QB");
15878 generate_exception_end(ctx
, EXCP_RI
);
15883 #ifdef TARGET_MIPS64
15884 case OPC_SHLL_OB_DSP
:
15885 op2
= MASK_SHLL_OB(ctx
->opcode
);
15889 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15893 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15895 case OPC_SHLL_S_PW
:
15897 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15899 case OPC_SHLLV_S_PW
:
15901 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15905 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15909 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15913 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15917 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15919 case OPC_SHLL_S_QH
:
15921 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15923 case OPC_SHLLV_S_QH
:
15925 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15929 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15933 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15935 case OPC_SHRA_R_OB
:
15937 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15939 case OPC_SHRAV_R_OB
:
15941 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15945 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15949 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15951 case OPC_SHRA_R_PW
:
15953 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15955 case OPC_SHRAV_R_PW
:
15957 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15961 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15965 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15967 case OPC_SHRA_R_QH
:
15969 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15971 case OPC_SHRAV_R_QH
:
15973 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15977 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15981 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15985 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15989 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15991 default: /* Invalid */
15992 MIPS_INVAL("MASK SHLL.OB");
15993 generate_exception_end(ctx
, EXCP_RI
);
16001 tcg_temp_free(v1_t
);
16002 tcg_temp_free(v2_t
);
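/*
 * DSP multiply and dot-product operations: the plain multiplies write
 * cpu_gpr[ret] directly, while the DPA/DPS/MAQ accumulator forms pass the
 * accumulator index to the helper in a 32-bit temporary instead.
 */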
static void gen_mipsdsp_multiply(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                 int ret, int v1, int v2, int check_ret)
{
    TCGv_i32 t0;
    TCGv v1_t;
    TCGv v2_t;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new_i32();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_i32(t0, ret);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
16026 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
16027 * the same mask and op1. */
16028 case OPC_MULT_G_2E
:
16032 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16035 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16038 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16040 case OPC_MULQ_RS_W
:
16041 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16045 case OPC_DPA_W_PH_DSP
:
16047 case OPC_DPAU_H_QBL
:
16049 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16051 case OPC_DPAU_H_QBR
:
16053 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16055 case OPC_DPSU_H_QBL
:
16057 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16059 case OPC_DPSU_H_QBR
:
16061 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16065 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16067 case OPC_DPAX_W_PH
:
16069 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16071 case OPC_DPAQ_S_W_PH
:
16073 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16075 case OPC_DPAQX_S_W_PH
:
16077 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16079 case OPC_DPAQX_SA_W_PH
:
16081 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16085 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16087 case OPC_DPSX_W_PH
:
16089 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16091 case OPC_DPSQ_S_W_PH
:
16093 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16095 case OPC_DPSQX_S_W_PH
:
16097 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16099 case OPC_DPSQX_SA_W_PH
:
16101 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16103 case OPC_MULSAQ_S_W_PH
:
16105 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16107 case OPC_DPAQ_SA_L_W
:
16109 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16111 case OPC_DPSQ_SA_L_W
:
16113 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16115 case OPC_MAQ_S_W_PHL
:
16117 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16119 case OPC_MAQ_S_W_PHR
:
16121 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16123 case OPC_MAQ_SA_W_PHL
:
16125 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16127 case OPC_MAQ_SA_W_PHR
:
16129 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16131 case OPC_MULSA_W_PH
:
16133 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16137 #ifdef TARGET_MIPS64
16138 case OPC_DPAQ_W_QH_DSP
:
16140 int ac
= ret
& 0x03;
16141 tcg_gen_movi_i32(t0
, ac
);
16146 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16150 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16154 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16158 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16162 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16164 case OPC_DPAQ_S_W_QH
:
16166 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16168 case OPC_DPAQ_SA_L_PW
:
16170 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16172 case OPC_DPAU_H_OBL
:
16174 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16176 case OPC_DPAU_H_OBR
:
16178 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16182 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16184 case OPC_DPSQ_S_W_QH
:
16186 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16188 case OPC_DPSQ_SA_L_PW
:
16190 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16192 case OPC_DPSU_H_OBL
:
16194 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16196 case OPC_DPSU_H_OBR
:
16198 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16200 case OPC_MAQ_S_L_PWL
:
16202 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16204 case OPC_MAQ_S_L_PWR
:
16206 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16208 case OPC_MAQ_S_W_QHLL
:
16210 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16212 case OPC_MAQ_SA_W_QHLL
:
16214 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16216 case OPC_MAQ_S_W_QHLR
:
16218 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16220 case OPC_MAQ_SA_W_QHLR
:
16222 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16224 case OPC_MAQ_S_W_QHRL
:
16226 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16228 case OPC_MAQ_SA_W_QHRL
:
16230 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16232 case OPC_MAQ_S_W_QHRR
:
16234 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16236 case OPC_MAQ_SA_W_QHRR
:
16238 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16240 case OPC_MULSAQ_S_L_PW
:
16242 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16244 case OPC_MULSAQ_S_W_QH
:
16246 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16252 case OPC_ADDU_QB_DSP
:
16254 case OPC_MULEU_S_PH_QBL
:
16256 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16258 case OPC_MULEU_S_PH_QBR
:
16260 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16262 case OPC_MULQ_RS_PH
:
16264 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16266 case OPC_MULEQ_S_W_PHL
:
16268 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16270 case OPC_MULEQ_S_W_PHR
:
16272 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16274 case OPC_MULQ_S_PH
:
16276 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16280 #ifdef TARGET_MIPS64
16281 case OPC_ADDU_OB_DSP
:
16283 case OPC_MULEQ_S_PW_QHL
:
16285 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16287 case OPC_MULEQ_S_PW_QHR
:
16289 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16291 case OPC_MULEU_S_QH_OBL
:
16293 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16295 case OPC_MULEU_S_QH_OBR
:
16297 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16299 case OPC_MULQ_RS_QH
:
16301 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16308 tcg_temp_free_i32(t0
);
16309 tcg_temp_free(v1_t
);
16310 tcg_temp_free(v2_t
);
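/*
 * DSP bit/field instructions (BITREV and the REPL/REPLV forms): the
 * immediate REPL variants build the replicated constant at translation
 * time with tcg_gen_movi_tl, while the register variants replicate val_t
 * using shift/or sequences.
 */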
static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int val)
{
    TCGv t0;
    TCGv val_t;

    if (ret == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    val_t = tcg_temp_new();
    gen_load_gpr(val_t, val);
16330 case OPC_ABSQ_S_PH_DSP
:
16334 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16339 target_long result
;
16340 imm
= (ctx
->opcode
>> 16) & 0xFF;
16341 result
= (uint32_t)imm
<< 24 |
16342 (uint32_t)imm
<< 16 |
16343 (uint32_t)imm
<< 8 |
16345 result
= (int32_t)result
;
16346 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16351 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16352 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16353 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16354 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16355 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16356 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16361 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16362 imm
= (int16_t)(imm
<< 6) >> 6;
16363 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16364 (target_long
)((int32_t)imm
<< 16 | \
16370 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16371 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16372 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16373 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16377 #ifdef TARGET_MIPS64
16378 case OPC_ABSQ_S_QH_DSP
:
16385 imm
= (ctx
->opcode
>> 16) & 0xFF;
16386 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16387 temp
= (temp
<< 16) | temp
;
16388 temp
= (temp
<< 32) | temp
;
16389 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16397 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16398 imm
= (int16_t)(imm
<< 6) >> 6;
16399 temp
= ((target_long
)imm
<< 32) \
16400 | ((target_long
)imm
& 0xFFFFFFFF);
16401 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16409 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16410 imm
= (int16_t)(imm
<< 6) >> 6;
16412 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16413 ((uint64_t)(uint16_t)imm
<< 32) |
16414 ((uint64_t)(uint16_t)imm
<< 16) |
16415 (uint64_t)(uint16_t)imm
;
16416 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16421 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16422 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16423 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16424 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16425 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16426 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16427 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16431 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16432 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16433 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16437 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16438 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16439 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16440 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16441 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16448 tcg_temp_free(val_t
);
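/*
 * DSP compare/pick instructions: the CMP* forms only update condition bits
 * (their helpers take cpu_env and no destination), the CMPG* forms write a
 * GPR, and the CMPGDU* forms additionally mirror the result bits into
 * cpu_dspctrl.
 */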
static void gen_mipsdsp_add_cmp_pick(DisasContext *ctx,
                                     uint32_t op1, uint32_t op2,
                                     int ret, int v1, int v2, int check_ret)
{
    TCGv t1;
    TCGv v1_t;
    TCGv v2_t;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
16472 case OPC_CMPU_EQ_QB_DSP
:
16474 case OPC_CMPU_EQ_QB
:
16476 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16478 case OPC_CMPU_LT_QB
:
16480 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16482 case OPC_CMPU_LE_QB
:
16484 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16486 case OPC_CMPGU_EQ_QB
:
16488 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16490 case OPC_CMPGU_LT_QB
:
16492 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16494 case OPC_CMPGU_LE_QB
:
16496 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16498 case OPC_CMPGDU_EQ_QB
:
16500 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16501 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16502 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16503 tcg_gen_shli_tl(t1
, t1
, 24);
16504 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16506 case OPC_CMPGDU_LT_QB
:
16508 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16509 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16510 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16511 tcg_gen_shli_tl(t1
, t1
, 24);
16512 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16514 case OPC_CMPGDU_LE_QB
:
16516 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16517 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16518 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16519 tcg_gen_shli_tl(t1
, t1
, 24);
16520 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16522 case OPC_CMP_EQ_PH
:
16524 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16526 case OPC_CMP_LT_PH
:
16528 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16530 case OPC_CMP_LE_PH
:
16532 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16536 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16540 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16542 case OPC_PACKRL_PH
:
16544 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16548 #ifdef TARGET_MIPS64
16549 case OPC_CMPU_EQ_OB_DSP
:
16551 case OPC_CMP_EQ_PW
:
16553 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16555 case OPC_CMP_LT_PW
:
16557 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16559 case OPC_CMP_LE_PW
:
16561 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16563 case OPC_CMP_EQ_QH
:
16565 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16567 case OPC_CMP_LT_QH
:
16569 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16571 case OPC_CMP_LE_QH
:
16573 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16575 case OPC_CMPGDU_EQ_OB
:
16577 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16579 case OPC_CMPGDU_LT_OB
:
16581 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16583 case OPC_CMPGDU_LE_OB
:
16585 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16587 case OPC_CMPGU_EQ_OB
:
16589 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16591 case OPC_CMPGU_LT_OB
:
16593 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16595 case OPC_CMPGU_LE_OB
:
16597 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16599 case OPC_CMPU_EQ_OB
:
16601 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16603 case OPC_CMPU_LT_OB
:
16605 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16607 case OPC_CMPU_LE_OB
:
16609 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16611 case OPC_PACKRL_PW
:
16613 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16617 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16621 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16625 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16633 tcg_temp_free(v1_t
);
16634 tcg_temp_free(v2_t
);
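
/*
 * Translate the DSP APPEND/PREPEND/BALIGN group (APPEND_DSP and, on
 * MIPS64, DAPPEND_DSP), which shift bits or bytes of rs into rt.
 */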
static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
                               uint32_t op1, int rt, int rs, int sa)
{
    TCGv t0;

    if (rt == 0) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);

    switch (op1) {
    case OPC_APPEND_DSP:
        switch (MASK_APPEND(ctx->opcode)) {
        case OPC_APPEND:
            tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 32 - sa);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            break;
        case OPC_PREPEND:
            tcg_gen_ext32u_tl(cpu_gpr[rt], cpu_gpr[rt]);
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
            tcg_gen_shli_tl(t0, t0, 32 - sa);
            tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            break;
        case OPC_BALIGN:
            if (sa != 0 && sa != 2) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_ext32u_tl(t0, t0);
                tcg_gen_shri_tl(t0, t0, 8 * (4 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            }
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK APPEND");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
#ifdef TARGET_MIPS64
    case OPC_DAPPEND_DSP:
        switch (MASK_DAPPEND(ctx->opcode)) {
        case OPC_DAPPEND:
            tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 64 - sa);
            break;
        case OPC_PREPENDD:
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], 0x20 | sa);
            tcg_gen_shli_tl(t0, t0, 64 - (0x20 | sa));
            tcg_gen_or_tl(cpu_gpr[rt], t0, t0);
            break;
        case OPC_PREPENDW:
            tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
            tcg_gen_shli_tl(t0, t0, 64 - sa);
            tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            break;
        case OPC_DBALIGN:
            if (sa != 0 && sa != 2 && sa != 4) {
                tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
                tcg_gen_shri_tl(t0, t0, 8 * (8 - sa));
                tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
            }
            break;
        default:            /* Invalid */
            MIPS_INVAL("MASK DAPPEND");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
#endif
    }
    tcg_temp_free(t0);
}
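
/*
 * Translate DSP accumulator access instructions (EXTR/EXTP/SHILO/
 * MTHLIP, WRDSP/RDDSP and their 64-bit counterparts).  Immediate
 * operands are first moved into TCG temporaries for the helper calls.
 */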
16725 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16726 int ret
, int v1
, int v2
, int check_ret
)
16735 if ((ret
== 0) && (check_ret
== 1)) {
16736 /* Treat as NOP. */
16740 t0
= tcg_temp_new();
16741 t1
= tcg_temp_new();
16742 v1_t
= tcg_temp_new();
16743 v2_t
= tcg_temp_new();
16745 gen_load_gpr(v1_t
, v1
);
16746 gen_load_gpr(v2_t
, v2
);
16749 case OPC_EXTR_W_DSP
:
16753 tcg_gen_movi_tl(t0
, v2
);
16754 tcg_gen_movi_tl(t1
, v1
);
16755 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16758 tcg_gen_movi_tl(t0
, v2
);
16759 tcg_gen_movi_tl(t1
, v1
);
16760 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16762 case OPC_EXTR_RS_W
:
16763 tcg_gen_movi_tl(t0
, v2
);
16764 tcg_gen_movi_tl(t1
, v1
);
16765 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16768 tcg_gen_movi_tl(t0
, v2
);
16769 tcg_gen_movi_tl(t1
, v1
);
16770 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16772 case OPC_EXTRV_S_H
:
16773 tcg_gen_movi_tl(t0
, v2
);
16774 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16777 tcg_gen_movi_tl(t0
, v2
);
16778 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16780 case OPC_EXTRV_R_W
:
16781 tcg_gen_movi_tl(t0
, v2
);
16782 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16784 case OPC_EXTRV_RS_W
:
16785 tcg_gen_movi_tl(t0
, v2
);
16786 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16789 tcg_gen_movi_tl(t0
, v2
);
16790 tcg_gen_movi_tl(t1
, v1
);
16791 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16794 tcg_gen_movi_tl(t0
, v2
);
16795 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16798 tcg_gen_movi_tl(t0
, v2
);
16799 tcg_gen_movi_tl(t1
, v1
);
16800 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16803 tcg_gen_movi_tl(t0
, v2
);
16804 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16807 imm
= (ctx
->opcode
>> 20) & 0x3F;
16808 tcg_gen_movi_tl(t0
, ret
);
16809 tcg_gen_movi_tl(t1
, imm
);
16810 gen_helper_shilo(t0
, t1
, cpu_env
);
16813 tcg_gen_movi_tl(t0
, ret
);
16814 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16817 tcg_gen_movi_tl(t0
, ret
);
16818 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16821 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16822 tcg_gen_movi_tl(t0
, imm
);
16823 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16826 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16827 tcg_gen_movi_tl(t0
, imm
);
16828 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16832 #ifdef TARGET_MIPS64
16833 case OPC_DEXTR_W_DSP
:
16837 tcg_gen_movi_tl(t0
, ret
);
16838 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16842 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16843 int ac
= (ctx
->opcode
>> 11) & 0x03;
16844 tcg_gen_movi_tl(t0
, shift
);
16845 tcg_gen_movi_tl(t1
, ac
);
16846 gen_helper_dshilo(t0
, t1
, cpu_env
);
16851 int ac
= (ctx
->opcode
>> 11) & 0x03;
16852 tcg_gen_movi_tl(t0
, ac
);
16853 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16857 tcg_gen_movi_tl(t0
, v2
);
16858 tcg_gen_movi_tl(t1
, v1
);
16860 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16863 tcg_gen_movi_tl(t0
, v2
);
16864 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16867 tcg_gen_movi_tl(t0
, v2
);
16868 tcg_gen_movi_tl(t1
, v1
);
16869 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16872 tcg_gen_movi_tl(t0
, v2
);
16873 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16876 tcg_gen_movi_tl(t0
, v2
);
16877 tcg_gen_movi_tl(t1
, v1
);
16878 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16880 case OPC_DEXTR_R_L
:
16881 tcg_gen_movi_tl(t0
, v2
);
16882 tcg_gen_movi_tl(t1
, v1
);
16883 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16885 case OPC_DEXTR_RS_L
:
16886 tcg_gen_movi_tl(t0
, v2
);
16887 tcg_gen_movi_tl(t1
, v1
);
16888 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16891 tcg_gen_movi_tl(t0
, v2
);
16892 tcg_gen_movi_tl(t1
, v1
);
16893 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16895 case OPC_DEXTR_R_W
:
16896 tcg_gen_movi_tl(t0
, v2
);
16897 tcg_gen_movi_tl(t1
, v1
);
16898 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16900 case OPC_DEXTR_RS_W
:
16901 tcg_gen_movi_tl(t0
, v2
);
16902 tcg_gen_movi_tl(t1
, v1
);
16903 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16905 case OPC_DEXTR_S_H
:
16906 tcg_gen_movi_tl(t0
, v2
);
16907 tcg_gen_movi_tl(t1
, v1
);
16908 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16910 case OPC_DEXTRV_S_H
:
16911 tcg_gen_movi_tl(t0
, v2
);
16912 tcg_gen_movi_tl(t1
, v1
);
16913 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16916 tcg_gen_movi_tl(t0
, v2
);
16917 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16919 case OPC_DEXTRV_R_L
:
16920 tcg_gen_movi_tl(t0
, v2
);
16921 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16923 case OPC_DEXTRV_RS_L
:
16924 tcg_gen_movi_tl(t0
, v2
);
16925 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16928 tcg_gen_movi_tl(t0
, v2
);
16929 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16931 case OPC_DEXTRV_R_W
:
16932 tcg_gen_movi_tl(t0
, v2
);
16933 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16935 case OPC_DEXTRV_RS_W
:
16936 tcg_gen_movi_tl(t0
, v2
);
16937 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16946 tcg_temp_free(v1_t
);
16947 tcg_temp_free(v2_t
);
/* End MIPSDSP functions. */
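
/* Decode the Release 6 specific encodings of the SPECIAL opcode space. */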
static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
16965 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16967 case OPC_MULT
... OPC_DIVU
:
16968 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16978 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16981 MIPS_INVAL("special_r6 muldiv");
16982 generate_exception_end(ctx
, EXCP_RI
);
16988 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16992 if (rt
== 0 && sa
== 1) {
            /* Major opcode and function field are shared with pre-R6 MFHI/MTHI.
               We additionally need to check the other fields. */
16995 gen_cl(ctx
, op1
, rd
, rs
);
16997 generate_exception_end(ctx
, EXCP_RI
);
17001 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17002 gen_helper_do_semihosting(cpu_env
);
17004 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
17005 generate_exception_end(ctx
, EXCP_RI
);
17007 generate_exception_end(ctx
, EXCP_DBp
);
17011 #if defined(TARGET_MIPS64)
17013 check_mips_64(ctx
);
17014 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17018 if (rt
== 0 && sa
== 1) {
            /* Major opcode and function field are shared with pre-R6 MFHI/MTHI.
               We additionally need to check the other fields. */
17021 check_mips_64(ctx
);
17022 gen_cl(ctx
, op1
, rd
, rs
);
17024 generate_exception_end(ctx
, EXCP_RI
);
17027 case OPC_DMULT
... OPC_DDIVU
:
17028 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17038 check_mips_64(ctx
);
17039 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17042 MIPS_INVAL("special_r6 muldiv");
17043 generate_exception_end(ctx
, EXCP_RI
);
17048 default: /* Invalid */
17049 MIPS_INVAL("special_r6");
17050 generate_exception_end(ctx
, EXCP_RI
);
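
/* Decode pre-R6 (legacy) encodings of the SPECIAL opcode space. */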
static void decode_opc_special_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
17067 case OPC_MOVN
: /* Conditional move */
17069 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
17070 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
17071 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17073 case OPC_MFHI
: /* Move from HI/LO */
17075 gen_HILO(ctx
, op1
, rs
& 3, rd
);
17078 case OPC_MTLO
: /* Move to HI/LO */
17079 gen_HILO(ctx
, op1
, rd
& 3, rs
);
17082 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
17083 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
17084 check_cp1_enabled(ctx
);
17085 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
17086 (ctx
->opcode
>> 16) & 1);
17088 generate_exception_err(ctx
, EXCP_CpU
, 1);
17094 check_insn(ctx
, INSN_VR54XX
);
17095 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
17096 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
17098 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17103 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17105 #if defined(TARGET_MIPS64)
17106 case OPC_DMULT
... OPC_DDIVU
:
17107 check_insn(ctx
, ISA_MIPS3
);
17108 check_mips_64(ctx
);
17109 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17113 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17116 #ifdef MIPS_STRICT_STANDARD
17117 MIPS_INVAL("SPIM");
17118 generate_exception_end(ctx
, EXCP_RI
);
17120 /* Implemented as RI exception for now. */
17121 MIPS_INVAL("spim (unofficial)");
17122 generate_exception_end(ctx
, EXCP_RI
);
17125 default: /* Invalid */
17126 MIPS_INVAL("special_legacy");
17127 generate_exception_end(ctx
, EXCP_RI
);
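
/*
 * Decode the SPECIAL opcode space that is common to all ISA revisions
 * and dispatch to the R6 or legacy decoder for the rest.
 */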
static void decode_opc_special(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
17144 case OPC_SLL
: /* Shift with immediate */
17145 if (sa
== 5 && rd
== 0 &&
17146 rs
== 0 && rt
== 0) { /* PAUSE */
17147 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
17148 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
17149 generate_exception_end(ctx
, EXCP_RI
);
17155 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17158 switch ((ctx
->opcode
>> 21) & 0x1f) {
17160 /* rotr is decoded as srl on non-R2 CPUs */
17161 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17166 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17169 generate_exception_end(ctx
, EXCP_RI
);
17173 case OPC_ADD
... OPC_SUBU
:
17174 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17176 case OPC_SLLV
: /* Shifts */
17178 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17181 switch ((ctx
->opcode
>> 6) & 0x1f) {
17183 /* rotrv is decoded as srlv on non-R2 CPUs */
17184 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17189 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17192 generate_exception_end(ctx
, EXCP_RI
);
17196 case OPC_SLT
: /* Set on less than */
17198 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17200 case OPC_AND
: /* Logic*/
17204 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17207 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17209 case OPC_TGE
... OPC_TEQ
: /* Traps */
17211 check_insn(ctx
, ISA_MIPS2
);
17212 gen_trap(ctx
, op1
, rs
, rt
, -1);
17214 case OPC_LSA
: /* OPC_PMON */
17215 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17216 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17217 decode_opc_special_r6(env
, ctx
);
17219 /* Pmon entry point, also R4010 selsl */
17220 #ifdef MIPS_STRICT_STANDARD
17221 MIPS_INVAL("PMON / selsl");
17222 generate_exception_end(ctx
, EXCP_RI
);
17224 gen_helper_0e0i(pmon
, sa
);
17229 generate_exception_end(ctx
, EXCP_SYSCALL
);
17232 generate_exception_end(ctx
, EXCP_BREAK
);
17235 check_insn(ctx
, ISA_MIPS2
);
17236 gen_sync(extract32(ctx
->opcode
, 6, 5));
17239 #if defined(TARGET_MIPS64)
17240 /* MIPS64 specific opcodes */
17245 check_insn(ctx
, ISA_MIPS3
);
17246 check_mips_64(ctx
);
17247 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17250 switch ((ctx
->opcode
>> 21) & 0x1f) {
17252 /* drotr is decoded as dsrl on non-R2 CPUs */
17253 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17258 check_insn(ctx
, ISA_MIPS3
);
17259 check_mips_64(ctx
);
17260 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17263 generate_exception_end(ctx
, EXCP_RI
);
17268 switch ((ctx
->opcode
>> 21) & 0x1f) {
17270 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17271 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17276 check_insn(ctx
, ISA_MIPS3
);
17277 check_mips_64(ctx
);
17278 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17281 generate_exception_end(ctx
, EXCP_RI
);
17285 case OPC_DADD
... OPC_DSUBU
:
17286 check_insn(ctx
, ISA_MIPS3
);
17287 check_mips_64(ctx
);
17288 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17292 check_insn(ctx
, ISA_MIPS3
);
17293 check_mips_64(ctx
);
17294 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17297 switch ((ctx
->opcode
>> 6) & 0x1f) {
17299 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17300 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17305 check_insn(ctx
, ISA_MIPS3
);
17306 check_mips_64(ctx
);
17307 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17310 generate_exception_end(ctx
, EXCP_RI
);
17315 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17316 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17317 decode_opc_special_r6(env
, ctx
);
17322 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17323 decode_opc_special_r6(env
, ctx
);
17325 decode_opc_special_legacy(env
, ctx
);
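
/* Decode the legacy SPECIAL2 opcode space (removed in Release 6). */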
static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd;
    uint32_t op1;

    check_insn_opc_removed(ctx, ISA_MIPS32R6);

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL2(ctx->opcode);
17343 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17344 case OPC_MSUB
... OPC_MSUBU
:
17345 check_insn(ctx
, ISA_MIPS32
);
17346 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17349 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17352 case OPC_DIVU_G_2F
:
17353 case OPC_MULT_G_2F
:
17354 case OPC_MULTU_G_2F
:
17356 case OPC_MODU_G_2F
:
17357 check_insn(ctx
, INSN_LOONGSON2F
);
17358 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17362 check_insn(ctx
, ISA_MIPS32
);
17363 gen_cl(ctx
, op1
, rd
, rs
);
17366 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17367 gen_helper_do_semihosting(cpu_env
);
            /* XXX: not clear which exception should be raised
             *      when in debug mode...
             */
17372 check_insn(ctx
, ISA_MIPS32
);
17373 generate_exception_end(ctx
, EXCP_DBp
);
17376 #if defined(TARGET_MIPS64)
17379 check_insn(ctx
, ISA_MIPS64
);
17380 check_mips_64(ctx
);
17381 gen_cl(ctx
, op1
, rd
, rs
);
17383 case OPC_DMULT_G_2F
:
17384 case OPC_DMULTU_G_2F
:
17385 case OPC_DDIV_G_2F
:
17386 case OPC_DDIVU_G_2F
:
17387 case OPC_DMOD_G_2F
:
17388 case OPC_DMODU_G_2F
:
17389 check_insn(ctx
, INSN_LOONGSON2F
);
17390 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17393 default: /* Invalid */
17394 MIPS_INVAL("special2_legacy");
17395 generate_exception_end(ctx
, EXCP_RI
);
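
/* Decode the Release 6 specific encodings of the SPECIAL3 opcode space. */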
static void decode_opc_special3_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    int imm;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode >> 7;

    op1 = MASK_SPECIAL3(ctx->opcode);
17416 /* hint codes 24-31 are reserved and signal RI */
17417 generate_exception_end(ctx
, EXCP_RI
);
17419 /* Treat as NOP. */
17422 check_cp0_enabled(ctx
);
17423 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17424 gen_cache_operation(ctx
, rt
, rs
, imm
);
17428 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17431 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17436 /* Treat as NOP. */
17439 op2
= MASK_BSHFL(ctx
->opcode
);
17441 case OPC_ALIGN
... OPC_ALIGN_END
:
17442 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17445 gen_bitswap(ctx
, op2
, rd
, rt
);
17450 #if defined(TARGET_MIPS64)
17452 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17455 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17458 check_mips_64(ctx
);
17461 /* Treat as NOP. */
17464 op2
= MASK_DBSHFL(ctx
->opcode
);
17466 case OPC_DALIGN
... OPC_DALIGN_END
:
17467 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17470 gen_bitswap(ctx
, op2
, rd
, rt
);
17477 default: /* Invalid */
17478 MIPS_INVAL("special3_r6");
17479 generate_exception_end(ctx
, EXCP_RI
);
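
/*
 * Decode pre-R6 SPECIAL3 encodings, including the DSP ASE and the
 * Loongson integer extensions.
 */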
static void decode_opc_special3_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL3(ctx->opcode);
17495 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17496 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17497 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17498 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17499 * the same mask and op1. */
17500 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17501 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17504 case OPC_ADDUH_R_QB
:
17506 case OPC_ADDQH_R_PH
:
17508 case OPC_ADDQH_R_W
:
17510 case OPC_SUBUH_R_QB
:
17512 case OPC_SUBQH_R_PH
:
17514 case OPC_SUBQH_R_W
:
17515 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17520 case OPC_MULQ_RS_W
:
17521 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17524 MIPS_INVAL("MASK ADDUH.QB");
17525 generate_exception_end(ctx
, EXCP_RI
);
17528 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17529 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17531 generate_exception_end(ctx
, EXCP_RI
);
17535 op2
= MASK_LX(ctx
->opcode
);
17537 #if defined(TARGET_MIPS64)
17543 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17545 default: /* Invalid */
17546 MIPS_INVAL("MASK LX");
17547 generate_exception_end(ctx
, EXCP_RI
);
17551 case OPC_ABSQ_S_PH_DSP
:
17552 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17554 case OPC_ABSQ_S_QB
:
17555 case OPC_ABSQ_S_PH
:
17557 case OPC_PRECEQ_W_PHL
:
17558 case OPC_PRECEQ_W_PHR
:
17559 case OPC_PRECEQU_PH_QBL
:
17560 case OPC_PRECEQU_PH_QBR
:
17561 case OPC_PRECEQU_PH_QBLA
:
17562 case OPC_PRECEQU_PH_QBRA
:
17563 case OPC_PRECEU_PH_QBL
:
17564 case OPC_PRECEU_PH_QBR
:
17565 case OPC_PRECEU_PH_QBLA
:
17566 case OPC_PRECEU_PH_QBRA
:
17567 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17574 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17577 MIPS_INVAL("MASK ABSQ_S.PH");
17578 generate_exception_end(ctx
, EXCP_RI
);
17582 case OPC_ADDU_QB_DSP
:
17583 op2
= MASK_ADDU_QB(ctx
->opcode
);
17586 case OPC_ADDQ_S_PH
:
17589 case OPC_ADDU_S_QB
:
17591 case OPC_ADDU_S_PH
:
17593 case OPC_SUBQ_S_PH
:
17596 case OPC_SUBU_S_QB
:
17598 case OPC_SUBU_S_PH
:
17602 case OPC_RADDU_W_QB
:
17603 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17605 case OPC_MULEU_S_PH_QBL
:
17606 case OPC_MULEU_S_PH_QBR
:
17607 case OPC_MULQ_RS_PH
:
17608 case OPC_MULEQ_S_W_PHL
:
17609 case OPC_MULEQ_S_W_PHR
:
17610 case OPC_MULQ_S_PH
:
17611 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17613 default: /* Invalid */
17614 MIPS_INVAL("MASK ADDU.QB");
17615 generate_exception_end(ctx
, EXCP_RI
);
17620 case OPC_CMPU_EQ_QB_DSP
:
17621 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17623 case OPC_PRECR_SRA_PH_W
:
17624 case OPC_PRECR_SRA_R_PH_W
:
17625 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17627 case OPC_PRECR_QB_PH
:
17628 case OPC_PRECRQ_QB_PH
:
17629 case OPC_PRECRQ_PH_W
:
17630 case OPC_PRECRQ_RS_PH_W
:
17631 case OPC_PRECRQU_S_QB_PH
:
17632 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17634 case OPC_CMPU_EQ_QB
:
17635 case OPC_CMPU_LT_QB
:
17636 case OPC_CMPU_LE_QB
:
17637 case OPC_CMP_EQ_PH
:
17638 case OPC_CMP_LT_PH
:
17639 case OPC_CMP_LE_PH
:
17640 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17642 case OPC_CMPGU_EQ_QB
:
17643 case OPC_CMPGU_LT_QB
:
17644 case OPC_CMPGU_LE_QB
:
17645 case OPC_CMPGDU_EQ_QB
:
17646 case OPC_CMPGDU_LT_QB
:
17647 case OPC_CMPGDU_LE_QB
:
17650 case OPC_PACKRL_PH
:
17651 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17653 default: /* Invalid */
17654 MIPS_INVAL("MASK CMPU.EQ.QB");
17655 generate_exception_end(ctx
, EXCP_RI
);
17659 case OPC_SHLL_QB_DSP
:
17660 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17662 case OPC_DPA_W_PH_DSP
:
17663 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17665 case OPC_DPAU_H_QBL
:
17666 case OPC_DPAU_H_QBR
:
17667 case OPC_DPSU_H_QBL
:
17668 case OPC_DPSU_H_QBR
:
17670 case OPC_DPAX_W_PH
:
17671 case OPC_DPAQ_S_W_PH
:
17672 case OPC_DPAQX_S_W_PH
:
17673 case OPC_DPAQX_SA_W_PH
:
17675 case OPC_DPSX_W_PH
:
17676 case OPC_DPSQ_S_W_PH
:
17677 case OPC_DPSQX_S_W_PH
:
17678 case OPC_DPSQX_SA_W_PH
:
17679 case OPC_MULSAQ_S_W_PH
:
17680 case OPC_DPAQ_SA_L_W
:
17681 case OPC_DPSQ_SA_L_W
:
17682 case OPC_MAQ_S_W_PHL
:
17683 case OPC_MAQ_S_W_PHR
:
17684 case OPC_MAQ_SA_W_PHL
:
17685 case OPC_MAQ_SA_W_PHR
:
17686 case OPC_MULSA_W_PH
:
17687 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17689 default: /* Invalid */
17690 MIPS_INVAL("MASK DPAW.PH");
17691 generate_exception_end(ctx
, EXCP_RI
);
17696 op2
= MASK_INSV(ctx
->opcode
);
17707 t0
= tcg_temp_new();
17708 t1
= tcg_temp_new();
17710 gen_load_gpr(t0
, rt
);
17711 gen_load_gpr(t1
, rs
);
17713 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17719 default: /* Invalid */
17720 MIPS_INVAL("MASK INSV");
17721 generate_exception_end(ctx
, EXCP_RI
);
17725 case OPC_APPEND_DSP
:
17726 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17728 case OPC_EXTR_W_DSP
:
17729 op2
= MASK_EXTR_W(ctx
->opcode
);
17733 case OPC_EXTR_RS_W
:
17735 case OPC_EXTRV_S_H
:
17737 case OPC_EXTRV_R_W
:
17738 case OPC_EXTRV_RS_W
:
17743 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17746 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17752 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17754 default: /* Invalid */
17755 MIPS_INVAL("MASK EXTR.W");
17756 generate_exception_end(ctx
, EXCP_RI
);
17760 #if defined(TARGET_MIPS64)
17761 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17762 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17763 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17764 check_insn(ctx
, INSN_LOONGSON2E
);
17765 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17767 case OPC_ABSQ_S_QH_DSP
:
17768 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17770 case OPC_PRECEQ_L_PWL
:
17771 case OPC_PRECEQ_L_PWR
:
17772 case OPC_PRECEQ_PW_QHL
:
17773 case OPC_PRECEQ_PW_QHR
:
17774 case OPC_PRECEQ_PW_QHLA
:
17775 case OPC_PRECEQ_PW_QHRA
:
17776 case OPC_PRECEQU_QH_OBL
:
17777 case OPC_PRECEQU_QH_OBR
:
17778 case OPC_PRECEQU_QH_OBLA
:
17779 case OPC_PRECEQU_QH_OBRA
:
17780 case OPC_PRECEU_QH_OBL
:
17781 case OPC_PRECEU_QH_OBR
:
17782 case OPC_PRECEU_QH_OBLA
:
17783 case OPC_PRECEU_QH_OBRA
:
17784 case OPC_ABSQ_S_OB
:
17785 case OPC_ABSQ_S_PW
:
17786 case OPC_ABSQ_S_QH
:
17787 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17795 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17797 default: /* Invalid */
17798 MIPS_INVAL("MASK ABSQ_S.QH");
17799 generate_exception_end(ctx
, EXCP_RI
);
17803 case OPC_ADDU_OB_DSP
:
17804 op2
= MASK_ADDU_OB(ctx
->opcode
);
17806 case OPC_RADDU_L_OB
:
17808 case OPC_SUBQ_S_PW
:
17810 case OPC_SUBQ_S_QH
:
17812 case OPC_SUBU_S_OB
:
17814 case OPC_SUBU_S_QH
:
17816 case OPC_SUBUH_R_OB
:
17818 case OPC_ADDQ_S_PW
:
17820 case OPC_ADDQ_S_QH
:
17822 case OPC_ADDU_S_OB
:
17824 case OPC_ADDU_S_QH
:
17826 case OPC_ADDUH_R_OB
:
17827 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17829 case OPC_MULEQ_S_PW_QHL
:
17830 case OPC_MULEQ_S_PW_QHR
:
17831 case OPC_MULEU_S_QH_OBL
:
17832 case OPC_MULEU_S_QH_OBR
:
17833 case OPC_MULQ_RS_QH
:
17834 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17836 default: /* Invalid */
17837 MIPS_INVAL("MASK ADDU.OB");
17838 generate_exception_end(ctx
, EXCP_RI
);
17842 case OPC_CMPU_EQ_OB_DSP
:
17843 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17845 case OPC_PRECR_SRA_QH_PW
:
17846 case OPC_PRECR_SRA_R_QH_PW
:
17847 /* Return value is rt. */
17848 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17850 case OPC_PRECR_OB_QH
:
17851 case OPC_PRECRQ_OB_QH
:
17852 case OPC_PRECRQ_PW_L
:
17853 case OPC_PRECRQ_QH_PW
:
17854 case OPC_PRECRQ_RS_QH_PW
:
17855 case OPC_PRECRQU_S_OB_QH
:
17856 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17858 case OPC_CMPU_EQ_OB
:
17859 case OPC_CMPU_LT_OB
:
17860 case OPC_CMPU_LE_OB
:
17861 case OPC_CMP_EQ_QH
:
17862 case OPC_CMP_LT_QH
:
17863 case OPC_CMP_LE_QH
:
17864 case OPC_CMP_EQ_PW
:
17865 case OPC_CMP_LT_PW
:
17866 case OPC_CMP_LE_PW
:
17867 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17869 case OPC_CMPGDU_EQ_OB
:
17870 case OPC_CMPGDU_LT_OB
:
17871 case OPC_CMPGDU_LE_OB
:
17872 case OPC_CMPGU_EQ_OB
:
17873 case OPC_CMPGU_LT_OB
:
17874 case OPC_CMPGU_LE_OB
:
17875 case OPC_PACKRL_PW
:
17879 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17881 default: /* Invalid */
17882 MIPS_INVAL("MASK CMPU_EQ.OB");
17883 generate_exception_end(ctx
, EXCP_RI
);
17887 case OPC_DAPPEND_DSP
:
17888 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17890 case OPC_DEXTR_W_DSP
:
17891 op2
= MASK_DEXTR_W(ctx
->opcode
);
17898 case OPC_DEXTR_R_L
:
17899 case OPC_DEXTR_RS_L
:
17901 case OPC_DEXTR_R_W
:
17902 case OPC_DEXTR_RS_W
:
17903 case OPC_DEXTR_S_H
:
17905 case OPC_DEXTRV_R_L
:
17906 case OPC_DEXTRV_RS_L
:
17907 case OPC_DEXTRV_S_H
:
17909 case OPC_DEXTRV_R_W
:
17910 case OPC_DEXTRV_RS_W
:
17911 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17916 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17918 default: /* Invalid */
17919 MIPS_INVAL("MASK EXTR.W");
17920 generate_exception_end(ctx
, EXCP_RI
);
17924 case OPC_DPAQ_W_QH_DSP
:
17925 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17927 case OPC_DPAU_H_OBL
:
17928 case OPC_DPAU_H_OBR
:
17929 case OPC_DPSU_H_OBL
:
17930 case OPC_DPSU_H_OBR
:
17932 case OPC_DPAQ_S_W_QH
:
17934 case OPC_DPSQ_S_W_QH
:
17935 case OPC_MULSAQ_S_W_QH
:
17936 case OPC_DPAQ_SA_L_PW
:
17937 case OPC_DPSQ_SA_L_PW
:
17938 case OPC_MULSAQ_S_L_PW
:
17939 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17941 case OPC_MAQ_S_W_QHLL
:
17942 case OPC_MAQ_S_W_QHLR
:
17943 case OPC_MAQ_S_W_QHRL
:
17944 case OPC_MAQ_S_W_QHRR
:
17945 case OPC_MAQ_SA_W_QHLL
:
17946 case OPC_MAQ_SA_W_QHLR
:
17947 case OPC_MAQ_SA_W_QHRL
:
17948 case OPC_MAQ_SA_W_QHRR
:
17949 case OPC_MAQ_S_L_PWL
:
17950 case OPC_MAQ_S_L_PWR
:
17955 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17957 default: /* Invalid */
17958 MIPS_INVAL("MASK DPAQ.W.QH");
17959 generate_exception_end(ctx
, EXCP_RI
);
17963 case OPC_DINSV_DSP
:
17964 op2
= MASK_INSV(ctx
->opcode
);
17975 t0
= tcg_temp_new();
17976 t1
= tcg_temp_new();
17978 gen_load_gpr(t0
, rt
);
17979 gen_load_gpr(t1
, rs
);
17981 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17987 default: /* Invalid */
17988 MIPS_INVAL("MASK DINSV");
17989 generate_exception_end(ctx
, EXCP_RI
);
17993 case OPC_SHLL_OB_DSP
:
17994 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17997 default: /* Invalid */
17998 MIPS_INVAL("special3_legacy");
17999 generate_exception_end(ctx
, EXCP_RI
);
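
/*
 * Decode the SPECIAL3 opcode space that is common to all ISA revisions
 * and dispatch to the R6 or legacy decoder for the rest.
 */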
static void decode_opc_special3(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL3(ctx->opcode);
18018 check_insn(ctx
, ISA_MIPS32R2
);
18019 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18022 op2
= MASK_BSHFL(ctx
->opcode
);
18024 case OPC_ALIGN
... OPC_ALIGN_END
:
18026 check_insn(ctx
, ISA_MIPS32R6
);
18027 decode_opc_special3_r6(env
, ctx
);
18030 check_insn(ctx
, ISA_MIPS32R2
);
18031 gen_bshfl(ctx
, op2
, rt
, rd
);
18035 #if defined(TARGET_MIPS64)
18036 case OPC_DEXTM
... OPC_DEXT
:
18037 case OPC_DINSM
... OPC_DINS
:
18038 check_insn(ctx
, ISA_MIPS64R2
);
18039 check_mips_64(ctx
);
18040 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18043 op2
= MASK_DBSHFL(ctx
->opcode
);
18045 case OPC_DALIGN
... OPC_DALIGN_END
:
18047 check_insn(ctx
, ISA_MIPS32R6
);
18048 decode_opc_special3_r6(env
, ctx
);
18051 check_insn(ctx
, ISA_MIPS64R2
);
18052 check_mips_64(ctx
);
18053 op2
= MASK_DBSHFL(ctx
->opcode
);
18054 gen_bshfl(ctx
, op2
, rt
, rd
);
18060 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
18063 check_insn(ctx
, ASE_MT
);
18065 TCGv t0
= tcg_temp_new();
18066 TCGv t1
= tcg_temp_new();
18068 gen_load_gpr(t0
, rt
);
18069 gen_load_gpr(t1
, rs
);
18070 gen_helper_fork(t0
, t1
);
18076 check_insn(ctx
, ASE_MT
);
18078 TCGv t0
= tcg_temp_new();
18080 gen_load_gpr(t0
, rs
);
18081 gen_helper_yield(t0
, cpu_env
, t0
);
18082 gen_store_gpr(t0
, rd
);
18087 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18088 decode_opc_special3_r6(env
, ctx
);
18090 decode_opc_special3_legacy(env
, ctx
);
/* MIPS SIMD Architecture (MSA) */
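
/*
 * Check that MSA is usable in the current context; if it is not, raise
 * an MSA-disabled or reserved-instruction exception as appropriate.
 */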
static inline int check_msa_access(DisasContext *ctx)
{
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
        return 0;
    }

    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            return 0;
        } else {
            generate_exception_end(ctx, EXCP_RI);
            return 0;
        }
    }
    return 1;
}
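
/*
 * Set tresult when any element of MSA register wt is zero, using a
 * per-element borrow trick on the two 64-bit halves of the register.
 */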
static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
{
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    switch (df) {
    case DF_BYTE:
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        break;
    case DF_HALF:
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        break;
    case DF_WORD:
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        break;
    case DF_DOUBLE:
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
        break;
    }
    tcg_gen_subi_i64(t0, msa_wr_d[wt<<1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt<<1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt<<1)+1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt<<1)+1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
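
/*
 * Translate the MSA branch instructions (BZ.V/BNZ.V and the per-element
 * forms); the branch condition is left in bcond and the delay-slot
 * flags are set on the context.
 */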
static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
{
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;

    check_msa_access(ctx);

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
    switch (op1) {
    case OPC_BZ_V:
    case OPC_BNZ_V:
        {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_or_i64(t0, msa_wr_d[wt<<1], msa_wr_d[(wt<<1)+1]);
            tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                    TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
            tcg_gen_trunc_i64_tl(bcond, t0);
            tcg_temp_free_i64(t0);
        }
        break;
    case OPC_BZ_B:
    case OPC_BZ_H:
    case OPC_BZ_W:
    case OPC_BZ_D:
        gen_check_zero_element(bcond, df, wt);
        break;
    case OPC_BNZ_B:
    case OPC_BNZ_H:
    case OPC_BNZ_W:
    case OPC_BNZ_D:
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
        break;
    }

    ctx->btarget = ctx->pc + (s16 << 2) + 4;

    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
}
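
/*
 * Translate MSA instructions with an 8-bit immediate (ANDI.B, ORI.B,
 * NORI.B, XORI.B, BMNZI.B, BMZI.B, BSELI.B and SHF.df).
 */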
18202 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
18204 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
18205 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
18206 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18207 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18209 TCGv_i32 twd
= tcg_const_i32(wd
);
18210 TCGv_i32 tws
= tcg_const_i32(ws
);
18211 TCGv_i32 ti8
= tcg_const_i32(i8
);
18213 switch (MASK_MSA_I8(ctx
->opcode
)) {
18215 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
18218 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
18221 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
18224 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
18227 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18230 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18233 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18239 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18240 if (df
== DF_DOUBLE
) {
18241 generate_exception_end(ctx
, EXCP_RI
);
18243 TCGv_i32 tdf
= tcg_const_i32(df
);
18244 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18245 tcg_temp_free_i32(tdf
);
18250 MIPS_INVAL("MSA instruction");
18251 generate_exception_end(ctx
, EXCP_RI
);
18255 tcg_temp_free_i32(twd
);
18256 tcg_temp_free_i32(tws
);
18257 tcg_temp_free_i32(ti8
);
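
/*
 * Translate MSA instructions with a 5-bit immediate; the immediate is
 * sign-extended for the signed forms (MAXI_S, MINI_S, CEQI, CLTI_S,
 * CLEI_S) and used as-is otherwise.
 */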
18260 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18262 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18263 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18264 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18265 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18266 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18267 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18269 TCGv_i32 tdf
= tcg_const_i32(df
);
18270 TCGv_i32 twd
= tcg_const_i32(wd
);
18271 TCGv_i32 tws
= tcg_const_i32(ws
);
18272 TCGv_i32 timm
= tcg_temp_new_i32();
18273 tcg_gen_movi_i32(timm
, u5
);
18275 switch (MASK_MSA_I5(ctx
->opcode
)) {
18277 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18280 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18282 case OPC_MAXI_S_df
:
18283 tcg_gen_movi_i32(timm
, s5
);
18284 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18286 case OPC_MAXI_U_df
:
18287 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18289 case OPC_MINI_S_df
:
18290 tcg_gen_movi_i32(timm
, s5
);
18291 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18293 case OPC_MINI_U_df
:
18294 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18297 tcg_gen_movi_i32(timm
, s5
);
18298 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18300 case OPC_CLTI_S_df
:
18301 tcg_gen_movi_i32(timm
, s5
);
18302 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18304 case OPC_CLTI_U_df
:
18305 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18307 case OPC_CLEI_S_df
:
18308 tcg_gen_movi_i32(timm
, s5
);
18309 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18311 case OPC_CLEI_U_df
:
18312 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18316 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18317 tcg_gen_movi_i32(timm
, s10
);
18318 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18322 MIPS_INVAL("MSA instruction");
18323 generate_exception_end(ctx
, EXCP_RI
);
18327 tcg_temp_free_i32(tdf
);
18328 tcg_temp_free_i32(twd
);
18329 tcg_temp_free_i32(tws
);
18330 tcg_temp_free_i32(timm
);
18333 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18335 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18336 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18337 uint32_t df
= 0, m
= 0;
18338 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18339 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18346 if ((dfm
& 0x40) == 0x00) {
18349 } else if ((dfm
& 0x60) == 0x40) {
18352 } else if ((dfm
& 0x70) == 0x60) {
18355 } else if ((dfm
& 0x78) == 0x70) {
18359 generate_exception_end(ctx
, EXCP_RI
);
18363 tdf
= tcg_const_i32(df
);
18364 tm
= tcg_const_i32(m
);
18365 twd
= tcg_const_i32(wd
);
18366 tws
= tcg_const_i32(ws
);
18368 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18370 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18373 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18376 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18379 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18382 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18385 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18387 case OPC_BINSLI_df
:
18388 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18390 case OPC_BINSRI_df
:
18391 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18394 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18397 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18400 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18403 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18406 MIPS_INVAL("MSA instruction");
18407 generate_exception_end(ctx
, EXCP_RI
);
18411 tcg_temp_free_i32(tdf
);
18412 tcg_temp_free_i32(tm
);
18413 tcg_temp_free_i32(twd
);
18414 tcg_temp_free_i32(tws
);
18417 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18419 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18420 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18421 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18422 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18423 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18425 TCGv_i32 tdf
= tcg_const_i32(df
);
18426 TCGv_i32 twd
= tcg_const_i32(wd
);
18427 TCGv_i32 tws
= tcg_const_i32(ws
);
18428 TCGv_i32 twt
= tcg_const_i32(wt
);
18430 switch (MASK_MSA_3R(ctx
->opcode
)) {
18432 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18435 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18438 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18441 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18443 case OPC_SUBS_S_df
:
18444 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18447 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18450 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18453 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18456 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18459 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18461 case OPC_ADDS_A_df
:
18462 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18464 case OPC_SUBS_U_df
:
18465 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18468 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18471 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18474 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18477 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18480 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18483 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18485 case OPC_ADDS_S_df
:
18486 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18488 case OPC_SUBSUS_U_df
:
18489 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18492 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18495 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18498 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18501 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18504 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18507 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18509 case OPC_ADDS_U_df
:
18510 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18512 case OPC_SUBSUU_S_df
:
18513 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18516 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18519 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18522 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18525 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18528 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18530 case OPC_ASUB_S_df
:
18531 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18534 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18537 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18540 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18543 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18546 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18549 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18551 case OPC_ASUB_U_df
:
18552 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18555 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18558 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18561 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18564 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18566 case OPC_AVER_S_df
:
18567 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18570 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18573 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18576 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18579 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18581 case OPC_AVER_U_df
:
18582 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18585 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18588 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18591 case OPC_DOTP_S_df
:
18592 case OPC_DOTP_U_df
:
18593 case OPC_DPADD_S_df
:
18594 case OPC_DPADD_U_df
:
18595 case OPC_DPSUB_S_df
:
18596 case OPC_HADD_S_df
:
18597 case OPC_DPSUB_U_df
:
18598 case OPC_HADD_U_df
:
18599 case OPC_HSUB_S_df
:
18600 case OPC_HSUB_U_df
:
18601 if (df
== DF_BYTE
) {
18602 generate_exception_end(ctx
, EXCP_RI
);
18605 switch (MASK_MSA_3R(ctx
->opcode
)) {
18606 case OPC_DOTP_S_df
:
18607 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18609 case OPC_DOTP_U_df
:
18610 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18612 case OPC_DPADD_S_df
:
18613 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18615 case OPC_DPADD_U_df
:
18616 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18618 case OPC_DPSUB_S_df
:
18619 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18621 case OPC_HADD_S_df
:
18622 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18624 case OPC_DPSUB_U_df
:
18625 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18627 case OPC_HADD_U_df
:
18628 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18630 case OPC_HSUB_S_df
:
18631 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18633 case OPC_HSUB_U_df
:
18634 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18639 MIPS_INVAL("MSA instruction");
18640 generate_exception_end(ctx
, EXCP_RI
);
18643 tcg_temp_free_i32(twd
);
18644 tcg_temp_free_i32(tws
);
18645 tcg_temp_free_i32(twt
);
18646 tcg_temp_free_i32(tdf
);
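
/*
 * Translate the MSA ELM encodings with a 10-bit minor opcode:
 * CTCMSA, CFCMSA and MOVE.V.
 */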
static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
{
#define MASK_MSA_ELM_DF3E(op)   (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
    uint8_t source = (ctx->opcode >> 11) & 0x1f;
    uint8_t dest = (ctx->opcode >> 6) & 0x1f;
    TCGv telm = tcg_temp_new();
    TCGv_i32 tsr = tcg_const_i32(source);
    TCGv_i32 tdt = tcg_const_i32(dest);

    switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
    case OPC_CTCMSA:
        gen_load_gpr(telm, source);
        gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
        break;
    case OPC_CFCMSA:
        gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
        gen_store_gpr(telm, dest);
        break;
    case OPC_MOVE_V:
        gen_helper_msa_move_v(cpu_env, tdt, tsr);
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    tcg_temp_free(telm);
    tcg_temp_free_i32(tdt);
    tcg_temp_free_i32(tsr);
}
18681 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18684 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18685 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18686 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18688 TCGv_i32 tws
= tcg_const_i32(ws
);
18689 TCGv_i32 twd
= tcg_const_i32(wd
);
18690 TCGv_i32 tn
= tcg_const_i32(n
);
18691 TCGv_i32 tdf
= tcg_const_i32(df
);
18693 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18695 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18697 case OPC_SPLATI_df
:
18698 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18701 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18703 case OPC_COPY_S_df
:
18704 case OPC_COPY_U_df
:
18705 case OPC_INSERT_df
:
18706 #if !defined(TARGET_MIPS64)
18707 /* Double format valid only for MIPS64 */
18708 if (df
== DF_DOUBLE
) {
18709 generate_exception_end(ctx
, EXCP_RI
);
18713 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18714 case OPC_COPY_S_df
:
18715 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18717 case OPC_COPY_U_df
:
18718 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18720 case OPC_INSERT_df
:
18721 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18726 MIPS_INVAL("MSA instruction");
18727 generate_exception_end(ctx
, EXCP_RI
);
18729 tcg_temp_free_i32(twd
);
18730 tcg_temp_free_i32(tws
);
18731 tcg_temp_free_i32(tn
);
18732 tcg_temp_free_i32(tdf
);
18735 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
18737 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
18738 uint32_t df
= 0, n
= 0;
18740 if ((dfn
& 0x30) == 0x00) {
18743 } else if ((dfn
& 0x38) == 0x20) {
18746 } else if ((dfn
& 0x3c) == 0x30) {
18749 } else if ((dfn
& 0x3e) == 0x38) {
18752 } else if (dfn
== 0x3E) {
18753 /* CTCMSA, CFCMSA, MOVE.V */
18754 gen_msa_elm_3e(env
, ctx
);
18757 generate_exception_end(ctx
, EXCP_RI
);
18761 gen_msa_elm_df(env
, ctx
, df
, n
);
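
/*
 * Translate three-register MSA floating-point and fixed-point Q-format
 * operations; df is adjusted before the helper call (word/doubleword
 * for the FP forms, halfword/word for the Q forms).
 */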
18764 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18766 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18767 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18768 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18769 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18770 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18772 TCGv_i32 twd
= tcg_const_i32(wd
);
18773 TCGv_i32 tws
= tcg_const_i32(ws
);
18774 TCGv_i32 twt
= tcg_const_i32(wt
);
18775 TCGv_i32 tdf
= tcg_temp_new_i32();
18777 /* adjust df value for floating-point instruction */
18778 tcg_gen_movi_i32(tdf
, df
+ 2);
18780 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18782 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18785 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18788 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18791 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18794 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18797 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18800 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18803 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18806 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18809 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18812 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18815 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18818 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18821 tcg_gen_movi_i32(tdf
, df
+ 1);
18822 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18825 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18828 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18830 case OPC_MADD_Q_df
:
18831 tcg_gen_movi_i32(tdf
, df
+ 1);
18832 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18835 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18837 case OPC_MSUB_Q_df
:
18838 tcg_gen_movi_i32(tdf
, df
+ 1);
18839 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18842 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18845 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18848 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18851 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18854 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18857 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18860 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18863 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18866 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18869 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18872 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18875 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18878 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18880 case OPC_MULR_Q_df
:
18881 tcg_gen_movi_i32(tdf
, df
+ 1);
18882 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18885 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18887 case OPC_FMIN_A_df
:
18888 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18890 case OPC_MADDR_Q_df
:
18891 tcg_gen_movi_i32(tdf
, df
+ 1);
18892 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18895 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18898 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18900 case OPC_MSUBR_Q_df
:
18901 tcg_gen_movi_i32(tdf
, df
+ 1);
18902 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18905 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18907 case OPC_FMAX_A_df
:
18908 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18911 MIPS_INVAL("MSA instruction");
18912 generate_exception_end(ctx
, EXCP_RI
);
18916 tcg_temp_free_i32(twd
);
18917 tcg_temp_free_i32(tws
);
18918 tcg_temp_free_i32(twt
);
18919 tcg_temp_free_i32(tdf
);
18922 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18924 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18925 (op & (0x7 << 18)))
18926 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18927 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18928 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18929 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18930 TCGv_i32 twd
= tcg_const_i32(wd
);
18931 TCGv_i32 tws
= tcg_const_i32(ws
);
18932 TCGv_i32 twt
= tcg_const_i32(wt
);
18933 TCGv_i32 tdf
= tcg_const_i32(df
);
18935 switch (MASK_MSA_2R(ctx
->opcode
)) {
18937 #if !defined(TARGET_MIPS64)
18938 /* Double format valid only for MIPS64 */
18939 if (df
== DF_DOUBLE
) {
18940 generate_exception_end(ctx
, EXCP_RI
);
18944 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18947 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18950 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18953 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18956 MIPS_INVAL("MSA instruction");
18957 generate_exception_end(ctx
, EXCP_RI
);
18961 tcg_temp_free_i32(twd
);
18962 tcg_temp_free_i32(tws
);
18963 tcg_temp_free_i32(twt
);
18964 tcg_temp_free_i32(tdf
);
18967 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18969 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18970 (op & (0xf << 17)))
18971 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18972 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18973 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18974 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18975 TCGv_i32 twd
= tcg_const_i32(wd
);
18976 TCGv_i32 tws
= tcg_const_i32(ws
);
18977 TCGv_i32 twt
= tcg_const_i32(wt
);
18978 /* adjust df value for floating-point instruction */
18979 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18981 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18982 case OPC_FCLASS_df
:
18983 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18985 case OPC_FTRUNC_S_df
:
18986 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18988 case OPC_FTRUNC_U_df
:
18989 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18992 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18994 case OPC_FRSQRT_df
:
18995 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18998 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
19001 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
19004 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
19006 case OPC_FEXUPL_df
:
19007 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
19009 case OPC_FEXUPR_df
:
19010 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
19013 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
19016 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
19018 case OPC_FTINT_S_df
:
19019 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
19021 case OPC_FTINT_U_df
:
19022 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
19024 case OPC_FFINT_S_df
:
19025 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
19027 case OPC_FFINT_U_df
:
19028 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
19032 tcg_temp_free_i32(twd
);
19033 tcg_temp_free_i32(tws
);
19034 tcg_temp_free_i32(twt
);
19035 tcg_temp_free_i32(tdf
);
19038 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
19040 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
19041 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19042 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19043 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19044 TCGv_i32 twd
= tcg_const_i32(wd
);
19045 TCGv_i32 tws
= tcg_const_i32(ws
);
19046 TCGv_i32 twt
= tcg_const_i32(wt
);
19048 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19050 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
19053 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
19056 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
19059 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
19062 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
19065 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
19068 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
19071 MIPS_INVAL("MSA instruction");
19072 generate_exception_end(ctx
, EXCP_RI
);
19076 tcg_temp_free_i32(twd
);
19077 tcg_temp_free_i32(tws
);
19078 tcg_temp_free_i32(twt
);
19081 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
19083 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19091 gen_msa_vec_v(env
, ctx
);
19094 gen_msa_2r(env
, ctx
);
19097 gen_msa_2rf(env
, ctx
);
19100 MIPS_INVAL("MSA instruction");
19101 generate_exception_end(ctx
, EXCP_RI
);
static void gen_msa(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opcode = ctx->opcode;

    check_insn(ctx, ASE_MSA);
    check_msa_access(ctx);

    switch (MASK_MSA_MINOR(opcode)) {
    case OPC_MSA_I8_00:
    case OPC_MSA_I8_01:
    case OPC_MSA_I8_02:
        gen_msa_i8(env, ctx);
        break;
    case OPC_MSA_I5_06:
    case OPC_MSA_I5_07:
        gen_msa_i5(env, ctx);
        break;
    case OPC_MSA_BIT_09:
    case OPC_MSA_BIT_0A:
        gen_msa_bit(env, ctx);
        break;
    case OPC_MSA_3R_0D:
    case OPC_MSA_3R_0E:
    case OPC_MSA_3R_0F:
    case OPC_MSA_3R_10:
    case OPC_MSA_3R_11:
    case OPC_MSA_3R_12:
    case OPC_MSA_3R_13:
    case OPC_MSA_3R_14:
    case OPC_MSA_3R_15:
        gen_msa_3r(env, ctx);
        break;
    case OPC_MSA_ELM:
        gen_msa_elm(env, ctx);
        break;
    case OPC_MSA_3RF_1A:
    case OPC_MSA_3RF_1B:
    case OPC_MSA_3RF_1C:
        gen_msa_3rf(env, ctx);
        break;
    case OPC_MSA_VEC:
        gen_msa_vec(env, ctx);
        break;
    case OPC_LD_B:
    case OPC_LD_H:
    case OPC_LD_W:
    case OPC_LD_D:
    case OPC_ST_B:
    case OPC_ST_H:
    case OPC_ST_W:
    case OPC_ST_D:
        {
            int32_t s10 = sextract32(ctx->opcode, 16, 10);
            uint8_t rs = (ctx->opcode >> 11) & 0x1f;
            uint8_t wd = (ctx->opcode >> 6) & 0x1f;
            uint8_t df = (ctx->opcode >> 0) & 0x3;

            TCGv_i32 twd = tcg_const_i32(wd);
            TCGv taddr = tcg_temp_new();
            gen_base_offset_addr(ctx, taddr, rs, s10 << df);

            switch (MASK_MSA_MINOR(opcode)) {
            case OPC_LD_B:
                gen_helper_msa_ld_b(cpu_env, twd, taddr);
                break;
            case OPC_LD_H:
                gen_helper_msa_ld_h(cpu_env, twd, taddr);
                break;
            case OPC_LD_W:
                gen_helper_msa_ld_w(cpu_env, twd, taddr);
                break;
            case OPC_LD_D:
                gen_helper_msa_ld_d(cpu_env, twd, taddr);
                break;
            case OPC_ST_B:
                gen_helper_msa_st_b(cpu_env, twd, taddr);
                break;
            case OPC_ST_H:
                gen_helper_msa_st_h(cpu_env, twd, taddr);
                break;
            case OPC_ST_W:
                gen_helper_msa_st_w(cpu_env, twd, taddr);
                break;
            case OPC_ST_D:
                gen_helper_msa_st_d(cpu_env, twd, taddr);
                break;
            }

            tcg_temp_free_i32(twd);
            tcg_temp_free(taddr);
        }
        break;
    default:
        MIPS_INVAL("MSA instruction");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
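/*
 * Top-level decoder for the standard 32-bit instruction stream: it checks
 * word alignment, finishes a pending "branch likely not taken" case,
 * extracts the common rs/rt/rd/sa/imm fields and then dispatches on the
 * major opcode (bits 31..26).
 */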
static void decode_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int32_t offset;
    int rs, rt, rd, sa;
    uint32_t op, op1, op2;
    int16_t imm;

    /* make sure instructions are on a word boundary */
    if (ctx->pc & 0x3) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_err(ctx, EXCP_AdEL, EXCP_INST_NOTAVAIL);
        return;
    }

    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->pc + 4);
        gen_set_label(l1);
    }

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
    switch (op) {
    case OPC_SPECIAL:
        decode_opc_special(env, ctx);
        break;
    case OPC_SPECIAL2:
        decode_opc_special2_legacy(env, ctx);
        break;
    case OPC_SPECIAL3:
        decode_opc_special3(env, ctx);
        break;
    case OPC_REGIMM:
        op1 = MASK_REGIMM(ctx->opcode);
        switch (op1) {
        case OPC_BLTZL: /* REGIMM branches */
        case OPC_BGEZL:
        case OPC_BLTZALL:
        case OPC_BGEZALL:
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            /* Fallthrough */
        case OPC_BLTZ:
        case OPC_BGEZ:
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            break;
        case OPC_BLTZAL:
        case OPC_BGEZAL:
            if (ctx->insn_flags & ISA_MIPS32R6) {
                if (rs == 0) {
                    /* OPC_NAL, OPC_BAL */
                    gen_compute_branch(ctx, op1, 4, 0, -1, imm << 2, 4);
                } else {
                    generate_exception_end(ctx, EXCP_RI);
                }
            } else {
                gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            }
            break;
        case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_trap(ctx, op1, rs, -1, imm);
            break;
        case OPC_SIGRIE:
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_SYNCI:
            check_insn(ctx, ISA_MIPS32R2);
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
            break;
        case OPC_BPOSGE32: /* MIPS DSP branch */
#if defined(TARGET_MIPS64)
        case OPC_BPOSGE64:
#endif
            check_dsp(ctx);
            gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2, 4);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DAHI:
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            if (rs != 0) {
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 32);
            }
            break;
        case OPC_DATI:
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            if (rs != 0) {
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 48);
            }
            break;
#endif
        default: /* Invalid */
            MIPS_INVAL("regimm");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_CP0:
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
        switch (op1) {
        case OPC_MFC0:
        case OPC_MTC0:
        case OPC_MFTR:
        case OPC_MTTR:
        case OPC_MFHC0:
        case OPC_MTHC0:
#if defined(TARGET_MIPS64)
        case OPC_DMFC0:
        case OPC_DMTC0:
#endif
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_MFMC0:
#ifndef CONFIG_USER_ONLY
            {
                TCGv t0 = tcg_temp_new();

                op2 = MASK_MFMC0(ctx->opcode);
                switch (op2) {
                case OPC_DMT:
                    check_insn(ctx, ASE_MT);
                    gen_helper_dmt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EMT:
                    check_insn(ctx, ASE_MT);
                    gen_helper_emt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVPE:
                    check_insn(ctx, ASE_MT);
                    gen_helper_dvpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EVPE:
                    check_insn(ctx, ASE_MT);
                    gen_helper_evpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_dvp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_EVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_evp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_DI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_di(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode. */
                    ctx->bstate = BS_STOP;
                    break;
                case OPC_EI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_ei(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode. */
                    ctx->bstate = BS_STOP;
                    break;
                default: /* Invalid */
                    MIPS_INVAL("mfmc0");
                    generate_exception_end(ctx, EXCP_RI);
                    break;
                }
                tcg_temp_free(t0);
            }
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_RDPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            break;
        case OPC_WRPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_ADDI */
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_ADDIU:
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_SLTI: /* Set on less than with immediate opcode */
    case OPC_SLTIU:
        gen_slt_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
    case OPC_ORI:
    case OPC_XORI:
        gen_logic_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        break;
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BLEZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BGTZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
        if (rt == 0) {
            /* OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
        if (rt == 0) {
            /* OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BEQL:
    case OPC_BNEL:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_BEQ:
    case OPC_BNE:
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        break;
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        /* Fallthrough */
    case OPC_LWL:
    case OPC_LWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SWL:
    case OPC_SWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_SB ... OPC_SH:
    case OPC_SW:
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SC:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_CACHE:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        }
        /* Treat as NOP. */
        break;
    case OPC_PREF:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */
        break;

    /* Floating point (COP1). */
    case OPC_LWC1:
    case OPC_LDC1:
    case OPC_SWC1:
    case OPC_SDC1:
        gen_cop1_ldst(ctx, op, rt, rs, imm);
        break;
    case OPC_CP1:
        op1 = MASK_CP1(ctx->opcode);

        switch (op1) {
        case OPC_MFHC1:
        case OPC_MTHC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
            /* fall through */
        case OPC_MFC1:
        case OPC_CFC1:
        case OPC_MTC1:
        case OPC_CTC1:
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DMFC1:
        case OPC_DMTC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#endif
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_BC1EQZ */
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                       rt, imm << 2, 4);
            } else {
                /* OPC_BC1ANY2 */
                check_cop1x(ctx);
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            }
            break;
        case OPC_BC1NEZ:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                   rt, imm << 2, 4);
            break;
        case OPC_BC1ANY4:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_cop1x(ctx);
            check_insn(ctx, ASE_MIPS3D);
            /* fall through */
        case OPC_BC1:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            break;
        case OPC_PS_FMT:
            check_ps(ctx);
            /* fall through */
        case OPC_S_FMT:
        case OPC_D_FMT:
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                       (imm >> 8) & 0x7);
            break;
        case OPC_W_FMT:
        case OPC_L_FMT:
        {
            int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                switch (r6_op) {
                case R6_OPC_CMP_AF_S:
                case R6_OPC_CMP_UN_S:
                case R6_OPC_CMP_EQ_S:
                case R6_OPC_CMP_UEQ_S:
                case R6_OPC_CMP_LT_S:
                case R6_OPC_CMP_ULT_S:
                case R6_OPC_CMP_LE_S:
                case R6_OPC_CMP_ULE_S:
                case R6_OPC_CMP_SAF_S:
                case R6_OPC_CMP_SUN_S:
                case R6_OPC_CMP_SEQ_S:
                case R6_OPC_CMP_SEUQ_S:
                case R6_OPC_CMP_SLT_S:
                case R6_OPC_CMP_SULT_S:
                case R6_OPC_CMP_SLE_S:
                case R6_OPC_CMP_SULE_S:
                case R6_OPC_CMP_OR_S:
                case R6_OPC_CMP_UNE_S:
                case R6_OPC_CMP_NE_S:
                case R6_OPC_CMP_SOR_S:
                case R6_OPC_CMP_SUNE_S:
                case R6_OPC_CMP_SNE_S:
                    gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    break;
                case R6_OPC_CMP_AF_D:
                case R6_OPC_CMP_UN_D:
                case R6_OPC_CMP_EQ_D:
                case R6_OPC_CMP_UEQ_D:
                case R6_OPC_CMP_LT_D:
                case R6_OPC_CMP_ULT_D:
                case R6_OPC_CMP_LE_D:
                case R6_OPC_CMP_ULE_D:
                case R6_OPC_CMP_SAF_D:
                case R6_OPC_CMP_SUN_D:
                case R6_OPC_CMP_SEQ_D:
                case R6_OPC_CMP_SEUQ_D:
                case R6_OPC_CMP_SLT_D:
                case R6_OPC_CMP_SULT_D:
                case R6_OPC_CMP_SLE_D:
                case R6_OPC_CMP_SULE_D:
                case R6_OPC_CMP_OR_D:
                case R6_OPC_CMP_UNE_D:
                case R6_OPC_CMP_NE_D:
                case R6_OPC_CMP_SOR_D:
                case R6_OPC_CMP_SUNE_D:
                case R6_OPC_CMP_SNE_D:
                    gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    break;
                default:
                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                               rt, rd, sa, (imm >> 8) & 0x7);
                    break;
                }
            } else {
                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                           (imm >> 8) & 0x7);
            }
            break;
        }
        case OPC_BZ_V:
        case OPC_BNZ_V:
        case OPC_BZ_B:
        case OPC_BZ_H:
        case OPC_BZ_W:
        case OPC_BZ_D:
        case OPC_BNZ_B:
        case OPC_BNZ_H:
        case OPC_BNZ_W:
        case OPC_BNZ_D:
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            break;
        default:
            MIPS_INVAL("cp1");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rs != 0) {
                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
            } else {
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);
            }
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
    case OPC_CP2:
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        break;
    case OPC_CP3:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
            switch (op1) {
            case OPC_LUXC1:
            case OPC_SUXC1:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_LWXC1:
            case OPC_LDXC1:
            case OPC_SWXC1:
            case OPC_SDXC1:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                break;
            case OPC_PREFX:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                break;
            case OPC_ALNV_PS:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_MADD_S:
            case OPC_MADD_D:
            case OPC_MADD_PS:
            case OPC_MSUB_S:
            case OPC_MSUB_D:
            case OPC_MSUB_PS:
            case OPC_NMADD_S:
            case OPC_NMADD_D:
            case OPC_NMADD_PS:
            case OPC_NMSUB_S:
            case OPC_NMSUB_D:
            case OPC_NMSUB_PS:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                break;
            default:
                MIPS_INVAL("cp3");
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
    case OPC_LLD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_LWU:
    case OPC_LD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* fall through */
    case OPC_SD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SCD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_DADDI */
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_DADDIU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
#else
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            /* OPC_DAUI */
            check_mips_64(ctx);
            if (rs == 0) {
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
                tcg_temp_free(t0);
            }
#else
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
#endif
        } else {
            /* OPC_JALX */
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        }
        break;
    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */
        gen_msa(env, ctx);
        break;
    case OPC_PCREL:
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);
        break;
    default: /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
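/*
 * Translate one translation block: instructions are decoded until the block
 * is terminated (ctx.bstate != BS_NONE), the next page boundary is reached,
 * the TCG opcode buffer fills up, or the instruction budget for this TB is
 * exhausted.
 */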
void gen_intermediate_code(CPUMIPSState *env, struct TranslationBlock *tb)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    DisasContext ctx;
    target_ulong pc_start;
    target_ulong next_page_start;
    int num_insns;
    int max_insns;
    int insn_bytes;
    int is_slot;

    pc_start = tb->pc;
    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    ctx.pc = pc_start;
    ctx.saved_pc = -1;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;
    ctx.tb = tb;
    ctx.bstate = BS_NONE;
    ctx.btarget = 0;
    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx.cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx.vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx.mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx.nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx.abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
#else
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
#endif
    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    gen_tb_start(tb);
    while (ctx.bstate == BS_NONE) {
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);
        num_insns++;

        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order to for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing. */
            ctx.pc += 4;
            goto done_generating;
        }

        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            insn_bytes = 4;
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
        } else {
            generate_exception_end(&ctx, EXCP_RI);
            break;
        }

        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* force to generate branch as there is neither delay nor
                   forbidden slot */
                is_slot = 1;
            }
            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */
                is_slot = 1;
            }
        }
        if (is_slot) {
            gen_branch(&ctx, insn_bytes);
        }
        ctx.pc += insn_bytes;

        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {
            break;
        }

        if (ctx.pc >= next_page_start) {
            break;
        }

        if (tcg_op_buf_full()) {
            break;
        }

        if (num_insns >= max_insns)
            break;

        if (singlestep)
            break;
    }
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_NONE:
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            tcg_gen_exit_tb(0);
            break;
        case BS_BRANCH:
        default:
            break;
        }
    }
done_generating:
    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
    }
#endif
}
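/*
 * Debug dump of the FPU state. In 64-bit FPU mode each register is printed
 * on its own; in 32-bit mode (FR=0) registers are printed as even/odd pairs
 * so the combined 64-bit view remains visible.
 */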
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
                           int flags)
{
    int i;
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
    do {                                                                \
        if (is_fpu64)                                                   \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu: %13g\n",                 \
                        (fp)->w[FP_ENDIAN_IDX], (fp)->d,                \
                        (double)(fp)->fd,                               \
                        (double)(fp)->fs[FP_ENDIAN_IDX],                \
                        (double)(fp)->fs[!FP_ENDIAN_IDX]);              \
        else {                                                          \
            fpr_t tmp;                                                  \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];              \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];       \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu:%13g\n",                  \
                        tmp.w[FP_ENDIAN_IDX], tmp.d,                    \
                        (double)tmp.fd,                                 \
                        (double)tmp.fs[FP_ENDIAN_IDX],                  \
                        (double)tmp.fs[!FP_ENDIAN_IDX]);                \
        }                                                               \
    } while (0)

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#undef printfpr
}
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                         int flags)
{
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    int i;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0) {
            cpu_fprintf(f, "GPR%02d:", i);
        }
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        if ((i & 3) == 3) {
            cpu_fprintf(f, "\n");
        }
    }

    cpu_fprintf(f, "CP0 Status  0x%08x Cause   0x%08x EPC    0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, "    Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                PRIx64 "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, "    Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, "    Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
}
void mips_tcg_init(void)
{
    int i;

    /* Initialize various static tables. */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    tcg_ctx.tcg_env = cpu_env;

    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
                                        regnames[i]);

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        msa_wr_d[i * 2] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
    }

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");
    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
                                       "fcr31");
}

#include "translate_init.c"
MIPSCPU *cpu_mips_init(const char *cpu_model)
{
    MIPSCPU *cpu;
    CPUMIPSState *env;
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    if (!def) {
        return NULL;
    }
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env = &cpu->env;
    env->cpu_model = def;
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
#endif
    fpu_init(env, def);
    mvp_init(env, def);

    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);

    return cpu;
}

bool cpu_supports_cps_smp(const char *cpu_model)
{
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);
    if (!def) {
        return false;
    }
    return (def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
}

bool cpu_supports_isa(const char *cpu_model, unsigned int isa)
{
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);
    if (!def) {
        return false;
    }
    return (def->insn_flags & isa) != 0;
}

void cpu_set_exception_base(int vp_index, target_ulong address)
{
    MIPSCPU *vp = MIPS_CPU(qemu_get_cpu(vp_index));
    vp->env.exception_base = address;
}
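/*
 * Reset the CPU to its power-on state: the CP0 configuration registers are
 * reloaded from the CPU model definition and the privileged state (Status,
 * EBase, Random, Wired, watchpoints, ...) is reinitialised.
 */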
void cpu_state_reset(CPUMIPSState *env)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
#endif
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    }
#endif
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31_rw_bitmask = env->cpu_model->CP1_fcr31_rw_bitmask;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# endif
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
# endif
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers. */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    }
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
    }
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    }
# endif
#else
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
    } else {
        env->CP0_ErrorEPC = env->active_tc.PC;
    }
    env->active_tc.PC = env->exception_base;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (kvm_enabled()) {
        env->CP0_EBase |= 0x40000000;
    } else {
        env->CP0_EBase |= 0x80000000;
    }
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    }
    env->CP0_EntryHi_ASID_mask = (env->CP0_Config4 & (1 << CP0C4_AE)) ?
                                 0x3ff : 0xff;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    {
        int i;

        for (i = 0; i < 7; i++) {
            env->CP0_WatchLo[i] = 0;
            env->CP0_WatchHi[i] = 0x80000000;
        }
        env->CP0_WatchLo[7] = 0;
        env->CP0_WatchHi[7] = 0;
    }
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        int i;

        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;
        cs->halted = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            cs->halted = 0;
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }
#endif
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    }

    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
        msa_reset(env);
    }

    compute_hflags(env);
    restore_fp_status(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
    }
}
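/*
 * Rebuild the CPU state recorded by tcg_gen_insn_start(): data[0] is the PC,
 * data[1] the branch-related hflags and data[2] the branch target.
 */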
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        env->btarget = data[2];
        break;
    }
}