/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "sysemu/kvm.h"
#include "exec/semihost.h"

#include "trace-tcg.h"

#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op) (op & (0x3F << 26))
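
/*
 * Worked example (editor's illustration, not from the original source):
 * the instruction word 0x24420001 encodes "addiu v0, v0, 1".  Its major
 * opcode field (bits 31..26) is 0x09, so
 * MASK_OP_MAJOR(0x24420001) == (0x09 << 26), i.e. OPC_ADDIU below.
 */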
enum {
    /* indirect opcode tables */
    OPC_SPECIAL = (0x00 << 26),
    OPC_REGIMM = (0x01 << 26),
    OPC_CP0 = (0x10 << 26),
    OPC_CP1 = (0x11 << 26),
    OPC_CP2 = (0x12 << 26),
    OPC_CP3 = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI = (0x08 << 26),
    OPC_ADDIU = (0x09 << 26),
    OPC_SLTI = (0x0A << 26),
    OPC_SLTIU = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI = (0x0C << 26),
    OPC_ORI = (0x0D << 26),
    OPC_XORI = (0x0E << 26),
    OPC_LUI = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI = (0x18 << 26),
    OPC_DADDIU = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL = (0x03 << 26),
    OPC_BEQ = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL = (0x14 << 26),
    OPC_BNE = (0x05 << 26),
    OPC_BNEL = (0x15 << 26),
    OPC_BLEZ = (0x06 << 26),
    OPC_BLEZL = (0x16 << 26),
    OPC_BGTZ = (0x07 << 26),
    OPC_BGTZL = (0x17 << 26),
    OPC_JALX = (0x1D << 26),
    OPC_DAUI = (0x1D << 26),
    OPC_LDL = (0x1A << 26),
    OPC_LDR = (0x1B << 26),
    OPC_LB = (0x20 << 26),
    OPC_LH = (0x21 << 26),
    OPC_LWL = (0x22 << 26),
    OPC_LW = (0x23 << 26),
    OPC_LWPC = OPC_LW | 0x5,
    OPC_LBU = (0x24 << 26),
    OPC_LHU = (0x25 << 26),
    OPC_LWR = (0x26 << 26),
    OPC_LWU = (0x27 << 26),
    OPC_SB = (0x28 << 26),
    OPC_SH = (0x29 << 26),
    OPC_SWL = (0x2A << 26),
    OPC_SW = (0x2B << 26),
    OPC_SDL = (0x2C << 26),
    OPC_SDR = (0x2D << 26),
    OPC_SWR = (0x2E << 26),
    OPC_LL = (0x30 << 26),
    OPC_LLD = (0x34 << 26),
    OPC_LD = (0x37 << 26),
    OPC_LDPC = OPC_LD | 0x5,
    OPC_SC = (0x38 << 26),
    OPC_SCD = (0x3C << 26),
    OPC_SD = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1 = (0x31 << 26),
    OPC_LWC2 = (0x32 << 26),
    OPC_LDC1 = (0x35 << 26),
    OPC_LDC2 = (0x36 << 26),
    OPC_SWC1 = (0x39 << 26),
    OPC_SWC2 = (0x3A << 26),
    OPC_SDC1 = (0x3D << 26),
    OPC_SDC2 = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC = (0x06 << 26),
    OPC_BGEZALC = (0x06 << 26),
    OPC_BGEUC = (0x06 << 26),
    OPC_BGTZALC = (0x07 << 26),
    OPC_BLTZALC = (0x07 << 26),
    OPC_BLTUC = (0x07 << 26),
    OPC_BOVC = (0x08 << 26),
    OPC_BEQZALC = (0x08 << 26),
    OPC_BEQC = (0x08 << 26),
    OPC_BLEZC = (0x16 << 26),
    OPC_BGEZC = (0x16 << 26),
    OPC_BGEC = (0x16 << 26),
    OPC_BGTZC = (0x17 << 26),
    OPC_BLTZC = (0x17 << 26),
    OPC_BLTC = (0x17 << 26),
    OPC_BNVC = (0x18 << 26),
    OPC_BNEZALC = (0x18 << 26),
    OPC_BNEC = (0x18 << 26),
    OPC_BC = (0x32 << 26),
    OPC_BEQZC = (0x36 << 26),
    OPC_JIC = (0x36 << 26),
    OPC_BALC = (0x3A << 26),
    OPC_BNEZC = (0x3E << 26),
    OPC_JIALC = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE = (0x2F << 26),
    OPC_PREF = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL = (0x3B << 26),
};

/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
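
/*
 * Note (editor's addition): the two masks above reflect how release 6
 * splits the PCREL major opcode.  ADDIUPC/LWPC/LWUPC are selected by
 * instruction bits 20..19 (the "TOP2BITS" mask), while AUIPC/ALUIPC use
 * the full 5-bit patterns 0x1e/0x1f in bits 20..16 (the "TOP5BITS" mask),
 * as the enumerators below show.
 */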
enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};

/* MIPS special opcodes */
#define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0 */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR = OPC_SRL | (1 << 21),
    OPC_SRA = 0x03 | OPC_SPECIAL,
    OPC_SLLV = 0x04 | OPC_SPECIAL,
    OPC_SRLV = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV = OPC_SRLV | (1 << 6),
    OPC_SRAV = 0x07 | OPC_SPECIAL,
    OPC_DSLLV = 0x14 | OPC_SPECIAL,
    OPC_DSRLV = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV = OPC_DSRLV | (1 << 6),
    OPC_DSRAV = 0x17 | OPC_SPECIAL,
    OPC_DSLL = 0x38 | OPC_SPECIAL,
    OPC_DSRL = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR = OPC_DSRL | (1 << 21),
    OPC_DSRA = 0x3B | OPC_SPECIAL,
    OPC_DSLL32 = 0x3C | OPC_SPECIAL,
    OPC_DSRL32 = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32 = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32 = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT = 0x18 | OPC_SPECIAL,
    OPC_MULTU = 0x19 | OPC_SPECIAL,
    OPC_DIV = 0x1A | OPC_SPECIAL,
    OPC_DIVU = 0x1B | OPC_SPECIAL,
    OPC_DMULT = 0x1C | OPC_SPECIAL,
    OPC_DMULTU = 0x1D | OPC_SPECIAL,
    OPC_DDIV = 0x1E | OPC_SPECIAL,
    OPC_DDIVU = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD = 0x20 | OPC_SPECIAL,
    OPC_ADDU = 0x21 | OPC_SPECIAL,
    OPC_SUB = 0x22 | OPC_SPECIAL,
    OPC_SUBU = 0x23 | OPC_SPECIAL,
    OPC_AND = 0x24 | OPC_SPECIAL,
    OPC_OR = 0x25 | OPC_SPECIAL,
    OPC_XOR = 0x26 | OPC_SPECIAL,
    OPC_NOR = 0x27 | OPC_SPECIAL,
    OPC_SLT = 0x2A | OPC_SPECIAL,
    OPC_SLTU = 0x2B | OPC_SPECIAL,
    OPC_DADD = 0x2C | OPC_SPECIAL,
    OPC_DADDU = 0x2D | OPC_SPECIAL,
    OPC_DSUB = 0x2E | OPC_SPECIAL,
    OPC_DSUBU = 0x2F | OPC_SPECIAL,

    OPC_JR = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR = 0x09 | OPC_SPECIAL, /* Also JALR.HB */

    OPC_TGE = 0x30 | OPC_SPECIAL,
    OPC_TGEU = 0x31 | OPC_SPECIAL,
    OPC_TLT = 0x32 | OPC_SPECIAL,
    OPC_TLTU = 0x33 | OPC_SPECIAL,
    OPC_TEQ = 0x34 | OPC_SPECIAL,
    OPC_TNE = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI = 0x10 | OPC_SPECIAL,
    OPC_MTHI = 0x11 | OPC_SPECIAL,
    OPC_MFLO = 0x12 | OPC_SPECIAL,
    OPC_MTLO = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ = 0x0A | OPC_SPECIAL,
    OPC_MOVN = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ = 0x37 | OPC_SPECIAL,

    OPC_MOVCI = 0x01 | OPC_SPECIAL,

    OPC_PMON = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL = 0x0C | OPC_SPECIAL,
    OPC_BREAK = 0x0D | OPC_SPECIAL,
    OPC_SPIM = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};

/* R6 Multiply and Divide instructions have the same opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
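
/*
 * Worked example (editor's illustration): the legacy "mult rs, rt" and
 * the R6 "mul rd, rs, rt" share major opcode SPECIAL and function field
 * 0x18; they differ only in bits 10..6 (0 for MULT, 2 for MUL), which is
 * exactly the extra field that MASK_R6_MULDIV() folds into the decoded
 * value via the (op & 0x7ff) term.
 */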
enum {
    R6_OPC_MUL = OPC_MULT | (2 << 6),
    R6_OPC_MUH = OPC_MULT | (3 << 6),
    R6_OPC_MULU = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU = OPC_MULTU | (3 << 6),
    R6_OPC_DIV = OPC_DIV | (2 << 6),
    R6_OPC_MOD = OPC_DIV | (3 << 6),
    R6_OPC_DIVU = OPC_DIVU | (2 << 6),
    R6_OPC_MODU = OPC_DIVU | (3 << 6),

    R6_OPC_DMUL = OPC_DMULT | (2 << 6),
    R6_OPC_DMUH = OPC_DMULT | (3 << 6),
    R6_OPC_DMULU = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV = OPC_DDIV | (2 << 6),
    R6_OPC_DMOD = OPC_DDIV | (3 << 6),
    R6_OPC_DDIVU = OPC_DDIVU | (2 << 6),
    R6_OPC_DMODU = OPC_DDIVU | (3 << 6),

    R6_OPC_CLZ = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP = 0x0e | OPC_SPECIAL,

    OPC_LSA = 0x05 | OPC_SPECIAL,
    OPC_DLSA = 0x15 | OPC_SPECIAL,
};

/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};

/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI = (0x1e << 16) | OPC_REGIMM,
};

/* Special2 opcodes */
#define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD = 0x00 | OPC_SPECIAL2,
    OPC_MADDU = 0x01 | OPC_SPECIAL2,
    OPC_MUL = 0x02 | OPC_SPECIAL2,
    OPC_MSUB = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F = 0x1f | OPC_SPECIAL2,

    OPC_CLZ = 0x20 | OPC_SPECIAL2,
    OPC_CLO = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ = 0x24 | OPC_SPECIAL2,
    OPC_DCLO = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP = 0x3F | OPC_SPECIAL2,
};

/* Special3 opcodes */
#define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU = 0x02 | OPC_SPECIAL3,
    OPC_DEXT = 0x03 | OPC_SPECIAL3,
    OPC_INS = 0x04 | OPC_SPECIAL3,
    OPC_DINSM = 0x05 | OPC_SPECIAL3,
    OPC_DINSU = 0x06 | OPC_SPECIAL3,
    OPC_DINS = 0x07 | OPC_SPECIAL3,
    OPC_FORK = 0x08 | OPC_SPECIAL3,
    OPC_YIELD = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP = 0x3C | OPC_SPECIAL3,

    R6_OPC_PREF = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};

#define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};

/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};

#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX = (0x08 << 6) | OPC_LX_DSP,
};

#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH = (0x1E << 6) | OPC_ADDU_QB_DSP,
};

#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};

#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};

#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W = (0x17 << 6) | OPC_SHLL_QB_DSP,
};

#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};

#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};

#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN = (0x10 << 6) | OPC_APPEND_DSP,
};

#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP = (0x12 << 6) | OPC_EXTR_W_DSP,
};

#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};

#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB = (0x1A << 6) | OPC_ADDU_OB_DSP,
};
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};

#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN = (0x10 << 6) | OPC_DAPPEND_DSP,
};

#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV = (0x1B << 6) | OPC_DEXTR_W_DSP,
};

#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};

#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};

#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))

enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH = (0x19 << 6) | OPC_SHLL_OB_DSP,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0 = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0 = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0 = (0x02 << 21) | OPC_CP0,
    OPC_MTC0 = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0 = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0 = (0x06 << 21) | OPC_CP0,
    OPC_MFTR = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0 = (0x0B << 21) | OPC_CP0,
    OPC_MTTR = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR = (0x0E << 21) | OPC_CP0,
    OPC_C0 = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST = (0x1F << 21) | OPC_CP0,
};

#define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_DVP = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
    OPC_EVP = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
};

/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR = 0x01 | OPC_C0,
    OPC_TLBWI = 0x02 | OPC_C0,
    OPC_TLBINV = 0x03 | OPC_C0,
    OPC_TLBINVF = 0x04 | OPC_C0,
    OPC_TLBWR = 0x06 | OPC_C0,
    OPC_TLBP = 0x08 | OPC_C0,
    OPC_RFE = 0x10 | OPC_C0,
    OPC_ERET = 0x18 | OPC_C0,
    OPC_DERET = 0x1F | OPC_C0,
    OPC_WAIT = 0x20 | OPC_C0,
};

/* Coprocessor 1 (rs field) */
#define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S = 16,  /* single fp */
    FMT_D = 17,  /* double fp */
    FMT_E = 18,  /* extended fp */
    FMT_Q = 19,  /* quad fp */
    FMT_W = 20,  /* 32-bit fixed */
    FMT_L = 21,  /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};

enum {
    OPC_MFC1 = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1 = (0x01 << 21) | OPC_CP1,
    OPC_CFC1 = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1 = (0x03 << 21) | OPC_CP1,
    OPC_MTC1 = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1 = (0x05 << 21) | OPC_CP1,
    OPC_CTC1 = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1 = (0x07 << 21) | OPC_CP1,
    OPC_BC1 = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D = (0x1F << 21) | OPC_CP1,
};

#define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F = (0x00 << 16) | OPC_BC1,
    OPC_BC1T = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2 = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2 = (0x01 << 21) | OPC_CP2,
    OPC_CFC2 = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2 = (0x03 << 21) | OPC_CP2,
    OPC_MTC2 = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2 = (0x05 << 21) | OPC_CP2,
    OPC_CTC2 = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2 = (0x07 << 21) | OPC_CP2,
    OPC_BC2 = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ = (0x0D << 21) | OPC_CP2,
};

#define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2 = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2 = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2 = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0 = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1 = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2 = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3 = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2 = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2 = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2 = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2 = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2 = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2 = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2 = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2 = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2 = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2 = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2 = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2 = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2 = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2 = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2 = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2 = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2 = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2 = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2 = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB = (29 << 21) | (0x0F) | OPC_CP2,
};

#define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1 = 0x00 | OPC_CP3,
    OPC_LDXC1 = 0x01 | OPC_CP3,
    OPC_LUXC1 = 0x05 | OPC_CP3,
    OPC_SWXC1 = 0x08 | OPC_CP3,
    OPC_SDXC1 = 0x09 | OPC_CP3,
    OPC_SUXC1 = 0x0D | OPC_CP3,
    OPC_PREFX = 0x0F | OPC_CP3,
    OPC_ALNV_PS = 0x1E | OPC_CP3,
    OPC_MADD_S = 0x20 | OPC_CP3,
    OPC_MADD_D = 0x21 | OPC_CP3,
    OPC_MADD_PS = 0x26 | OPC_CP3,
    OPC_MSUB_S = 0x28 | OPC_CP3,
    OPC_MSUB_D = 0x29 | OPC_CP3,
    OPC_MSUB_PS = 0x2E | OPC_CP3,
    OPC_NMADD_S = 0x30 | OPC_CP3,
    OPC_NMADD_D = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S = 0x38 | OPC_CP3,
    OPC_NMSUB_D = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
#define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))

enum {
    OPC_MSA_I8_00 = 0x00 | OPC_MSA,
    OPC_MSA_I8_01 = 0x01 | OPC_MSA,
    OPC_MSA_I8_02 = 0x02 | OPC_MSA,
    OPC_MSA_I5_06 = 0x06 | OPC_MSA,
    OPC_MSA_I5_07 = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09 = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F = 0x0F | OPC_MSA,
    OPC_MSA_3R_10 = 0x10 | OPC_MSA,
    OPC_MSA_3R_11 = 0x11 | OPC_MSA,
    OPC_MSA_3R_12 = 0x12 | OPC_MSA,
    OPC_MSA_3R_13 = 0x13 | OPC_MSA,
    OPC_MSA_3R_14 = 0x14 | OPC_MSA,
    OPC_MSA_3R_15 = 0x15 | OPC_MSA,
    OPC_MSA_ELM = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C = 0x1C | OPC_MSA,
    OPC_MSA_VEC = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B = (0x20) | OPC_MSA,
    OPC_LD_H = (0x21) | OPC_MSA,
    OPC_LD_W = (0x22) | OPC_MSA,
    OPC_LD_D = (0x23) | OPC_MSA,
    OPC_ST_B = (0x24) | OPC_MSA,
    OPC_ST_H = (0x25) | OPC_MSA,
    OPC_ST_W = (0x26) | OPC_MSA,
    OPC_ST_D = (0x27) | OPC_MSA,
};

enum {
    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df = (0x7 << 23) | OPC_MSA_BIT_09,
};
/* global register indices */
static TCGv_env cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];

#include "exec/gen-icount.h"
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)
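
/*
 * Usage sketch (editor's illustration, not part of the original file):
 * these wrappers let callers pass a plain C integer where the generated
 * helper expects a TCGv_i32.  For instance, generate_exception() further
 * below expands
 *
 *     gen_helper_0e0i(raise_exception, excp);
 *
 * into "allocate a constant i32 holding excp, call
 * gen_helper_raise_exception(cpu_env, tmp), then free tmp".
 */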
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    int mem_idx;
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;

enum {
    BS_NONE = 0, /* We go out of the TB without reaching a branch or an
                  * exception condition */
    BS_STOP = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP = 3, /* We reached an exception condition */
};
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
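/*
 * MIPS GPR 0 ($zero) is architecturally hard-wired to zero: reads are
 * folded to an immediate 0 and writes generate no code at all, which is
 * why both helpers above special-case reg == 0.
 */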
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}
static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
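/*
 * The two helpers above address the previous shadow register set selected
 * by CP0_SRSCtl.PSS: the 4-bit field is scaled by 32 * sizeof(target_ulong)
 * to form a byte offset from cpu_env, and the individual register is then
 * accessed at reg * sizeof(target_ulong) within that bank.  This assumes
 * the shadow GPR banks are laid out consecutively at the start of
 * CPUMIPSState.
 */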
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
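/*
 * generate_exception() only emits the raise_exception helper call; the
 * _err/_end variants additionally save the CPU state first and mark the
 * block as BS_EXCP, so translation of the current TB stops after this
 * instruction.
 */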
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}
static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}
static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
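/*
 * In 32-bit FPU mode (Status.FR == 0) a 64-bit value lives in an even/odd
 * register pair, which is why the non-F64 paths above combine
 * fpu_f64[reg & ~1] (low word) with fpu_f64[reg | 1] (high word) instead
 * of touching a single register.
 */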
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}
/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables.  */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled.  */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/*
 * Verify if a floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers.  When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid.  This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}
/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by the CP0 Status register MX(24) bit.
 */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}
/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has the corresponding flag set, which indicates that the
   instruction has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the 64-bit paired-single (PS) floating point
   data type. */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use.  */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    check_cp1_registers(ctx, fs | ft);                                        \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case  0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break;\
    case  1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break;\
    case  2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break;\
    case  3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break;\
    case  4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break;\
    case  5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break;\
    case  6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break;\
    case  7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break;\
    case  8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break;\
    case  9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc);  break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc);  break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);   break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc);  break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);   break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc);  break;\
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}
FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#define FOP_CONDNS(fmt, ifmt, bits, STORE)                              \
static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n,        \
                                      int ft, int fs, int fd)           \
{                                                                       \
    TCGv_i ## bits fp0 = tcg_temp_new_i ## bits();                      \
    TCGv_i ## bits fp1 = tcg_temp_new_i ## bits();                      \
    if (ifmt == FMT_D) {                                                \
        check_cp1_registers(ctx, fs | ft | fd);                         \
    }                                                                   \
    gen_ldcmp_fpr ## bits(ctx, fp0, fs);                                \
    gen_ldcmp_fpr ## bits(ctx, fp1, ft);                                \
    gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1);           \
    gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1);           \
    gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1);           \
    gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1);          \
    gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1);           \
    gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1);          \
    gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1);           \
    gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1);          \
    gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1);          \
    gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1);          \
    gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1);          \
    gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1);         \
    gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1);          \
    gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1);         \
    gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1);          \
    gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1);         \
    gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1);           \
    gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1);          \
    gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1);           \
    gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1);          \
    gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1);         \
    gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1);          \
    tcg_temp_free_i ## bits (fp0);                                      \
    tcg_temp_free_i ## bits (fp1);                                      \
}

FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))

#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
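/*
 * For orientation (not stated in this excerpt): FOP_CONDS instantiates the
 * gen_cmp_<fmt>/gen_cmpabs_<fmt> generators used by the pre-R6 C.cond.fmt
 * comparisons, whose helpers set a condition bit, while FOP_CONDNS
 * instantiates gen_r6_cmp_<fmt> for the R6 CMP.cond.fmt encodings, whose
 * result value is written back to an FPR by the STORE argument.
 */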
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx);                        \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    TCGLabel *l1 = gen_new_label();                                          \
    TCGLabel *l2 = gen_new_label();                                          \
                                                                             \
    tcg_gen_andi_tl(t0, arg2, almask);                                       \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                              \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));      \
    generate_exception(ctx, EXCP_AdES);                                      \
    gen_set_label(l1);                                                       \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));              \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                            \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                        \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));               \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));          \
    generate_exception_end(ctx, EXCP_SC);                                    \
    gen_set_label(l2);                                                       \
    tcg_gen_movi_tl(t0, 0);                                                  \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx);                     \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
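/*
 * Rough summary of the user-mode OP_ST_ATOMIC expansion above: the store
 * address is first checked for alignment (raising AdES on failure), then
 * compared against the lladdr recorded by the matching LL.  A mismatch
 * simply writes 0 to rt (SC failed); a match records llreg/llnewval and
 * raises EXCP_SC so the pending store can be completed atomically outside
 * generated code.
 */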
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
static void gen_ld(DisasContext *ctx, uint32_t opc,
                   int rt, int base, int16_t offset)
{
    TCGv t0, t1, t2;

    if (rt == 0 && ctx->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F)) {
        /* Loongson CPU uses a load to zero register for prefetch.
           We emulate it as a NOP. On other CPU we must perform the
           actual memory access. */
        return;
    }

    t0 = tcg_temp_new();
    gen_base_offset_addr(ctx, t0, base, offset);

#if defined(TARGET_MIPS64)
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEUL |
                       ctx->default_tcg_memop_mask);
    gen_store_gpr(t0, rt);

    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ |
                       ctx->default_tcg_memop_mask);
    gen_store_gpr(t0, rt);

    op_ld_lld(t0, t0, ctx);
    gen_store_gpr(t0, rt);

    t1 = tcg_temp_new();
    /* Do a byte access to possibly trigger a page
       fault with the unaligned address.  */
    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
    tcg_gen_andi_tl(t1, t0, 7);
#ifndef TARGET_WORDS_BIGENDIAN
    tcg_gen_xori_tl(t1, t1, 7);
#endif
    tcg_gen_shli_tl(t1, t1, 3);
    tcg_gen_andi_tl(t0, t0, ~7);
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
    tcg_gen_shl_tl(t0, t0, t1);
    t2 = tcg_const_tl(-1);
    tcg_gen_shl_tl(t2, t2, t1);
    gen_load_gpr(t1, rt);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(t0, t0, t1);
    gen_store_gpr(t0, rt);

    t1 = tcg_temp_new();
    /* Do a byte access to possibly trigger a page
       fault with the unaligned address.  */
    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
    tcg_gen_andi_tl(t1, t0, 7);
#ifdef TARGET_WORDS_BIGENDIAN
    tcg_gen_xori_tl(t1, t1, 7);
#endif
    tcg_gen_shli_tl(t1, t1, 3);
    tcg_gen_andi_tl(t0, t0, ~7);
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
    tcg_gen_shr_tl(t0, t0, t1);
    tcg_gen_xori_tl(t1, t1, 63);
    t2 = tcg_const_tl(0xfffffffffffffffeull);
    tcg_gen_shl_tl(t2, t2, t1);
    gen_load_gpr(t1, rt);
    tcg_gen_and_tl(t1, t1, t2);
    tcg_gen_or_tl(t0, t0, t1);
    gen_store_gpr(t0, rt);

    t1 = tcg_const_tl(pc_relative_pc(ctx));
    gen_op_addr_add(ctx, t0, t0, t1);
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
    gen_store_gpr(t0, rt);
#endif

    t1 = tcg_const_tl(pc_relative_pc(ctx));
    gen_op_addr_add(ctx, t0, t0, t1);
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t0, rt);

    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL |
                       ctx->default_tcg_memop_mask);
    gen_store_gpr(t0, rt);

    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW |
                       ctx->default_tcg_memop_mask);
    gen_store_gpr(t0, rt);

    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEUW |
                       ctx->default_tcg_memop_mask);
    gen_store_gpr(t0, rt);

    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_SB);
    gen_store_gpr(t0, rt);

    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
    gen_store_gpr(t0, rt);

    t1 = tcg_temp_new();
    /* Do a byte access to possibly trigger a page
       fault with the unaligned address.  */
    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
    tcg_gen_andi_tl(t1, t0, 3);
#ifndef TARGET_WORDS_BIGENDIAN
    tcg_gen_xori_tl(t1, t1, 3);
#endif
    tcg_gen_shli_tl(t1, t1, 3);
    tcg_gen_andi_tl(t0, t0, ~3);
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEUL);
    tcg_gen_shl_tl(t0, t0, t1);
    t2 = tcg_const_tl(-1);
    tcg_gen_shl_tl(t2, t2, t1);
    gen_load_gpr(t1, rt);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_ext32s_tl(t0, t0);
    gen_store_gpr(t0, rt);

    t1 = tcg_temp_new();
    /* Do a byte access to possibly trigger a page
       fault with the unaligned address.  */
    tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_UB);
    tcg_gen_andi_tl(t1, t0, 3);
#ifdef TARGET_WORDS_BIGENDIAN
    tcg_gen_xori_tl(t1, t1, 3);
#endif
    tcg_gen_shli_tl(t1, t1, 3);
    tcg_gen_andi_tl(t0, t0, ~3);
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEUL);
    tcg_gen_shr_tl(t0, t0, t1);
    tcg_gen_xori_tl(t1, t1, 31);
    t2 = tcg_const_tl(0xfffffffeull);
    tcg_gen_shl_tl(t2, t2, t1);
    gen_load_gpr(t1, rt);
    tcg_gen_and_tl(t1, t1, t2);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_ext32s_tl(t0, t0);
    gen_store_gpr(t0, rt);

    op_ld_ll(t0, t0, ctx);
    gen_store_gpr(t0, rt);
}
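/*
 * The LWL/LWR (and LDL/LDR) sequences above implement the classic
 * unaligned-load merge: probe the exact address with a byte load so any
 * page fault is reported against the original address, then load the
 * aligned word, shift it into position, and combine it with the bits of
 * rt that the instruction must preserve (selected via the shifted
 * all-ones mask in t2).
 */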
static void gen_st (DisasContext *ctx, uint32_t opc, int rt,
                    int base, int16_t offset)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);

#if defined(TARGET_MIPS64)
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ |
                       ctx->default_tcg_memop_mask);
    gen_helper_0e2i(sdl, t1, t0, ctx->mem_idx);
    gen_helper_0e2i(sdr, t1, t0, ctx->mem_idx);
#endif
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL |
                       ctx->default_tcg_memop_mask);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUW |
                       ctx->default_tcg_memop_mask);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_8);
    gen_helper_0e2i(swl, t1, t0, ctx->mem_idx);
    gen_helper_0e2i(swr, t1, t0, ctx->mem_idx);
}
/* Store conditional */
static void gen_st_cond (DisasContext *ctx, uint32_t opc, int rt,
                         int base, int16_t offset)
{
    TCGv t0, t1;

#ifdef CONFIG_USER_ONLY
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
#else
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
#endif
    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);

#if defined(TARGET_MIPS64)
    op_st_scd(t1, t0, rt, ctx);
#endif
    op_st_sc(t1, t0, rt, ctx);
}
/* Load and store */
static void gen_flt_ldst (DisasContext *ctx, uint32_t opc, int ft,
                          int base, int16_t offset)
{
    TCGv t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    {
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_qemu_ld_i32(fp0, t0, ctx->mem_idx, MO_TESL |
                            ctx->default_tcg_memop_mask);
        gen_store_fpr32(ctx, fp0, ft);
        tcg_temp_free_i32(fp0);
    }
    {
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32(ctx, fp0, ft);
        tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL |
                            ctx->default_tcg_memop_mask);
        tcg_temp_free_i32(fp0);
    }
    {
        TCGv_i64 fp0 = tcg_temp_new_i64();
        tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ |
                            ctx->default_tcg_memop_mask);
        gen_store_fpr64(ctx, fp0, ft);
        tcg_temp_free_i64(fp0);
    }
    {
        TCGv_i64 fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, ft);
        tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ |
                            ctx->default_tcg_memop_mask);
        tcg_temp_free_i64(fp0);
    }
    MIPS_INVAL("flt_ldst");
    generate_exception_end(ctx, EXCP_RI);
}
static void gen_cop1_ldst(DisasContext *ctx, uint32_t op, int rt,
                          int rs, int16_t imm)
{
    if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
        check_cp1_enabled(ctx);
        check_insn(ctx, ISA_MIPS2);
        gen_flt_ldst(ctx, op, rt, rs, imm);
    } else {
        generate_exception_err(ctx, EXCP_CpU, 1);
    }
}
/* Arithmetic with immediate operand */
static void gen_arith_imm(DisasContext *ctx, uint32_t opc,
                          int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */

    if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
        /* If no destination, treat it as a NOP.
           For addi, we must generate the overflow exception when needed. */
        return;
    }

    {
        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();
        TCGLabel *l1 = gen_new_label();

        gen_load_gpr(t1, rs);
        tcg_gen_addi_tl(t0, t1, uimm);
        tcg_gen_ext32s_tl(t0, t0);

        tcg_gen_xori_tl(t1, t1, ~uimm);
        tcg_gen_xori_tl(t2, t0, uimm);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
        /* operands of same sign, result different sign */
        generate_exception(ctx, EXCP_OVERFLOW);
        gen_set_label(l1);
        tcg_gen_ext32s_tl(t0, t0);
        gen_store_gpr(t0, rt);
    }

    tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
    tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);

    tcg_gen_movi_tl(cpu_gpr[rt], uimm);

#if defined(TARGET_MIPS64)
    {
        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();
        TCGLabel *l1 = gen_new_label();

        gen_load_gpr(t1, rs);
        tcg_gen_addi_tl(t0, t1, uimm);

        tcg_gen_xori_tl(t1, t1, ~uimm);
        tcg_gen_xori_tl(t2, t0, uimm);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
        /* operands of same sign, result different sign */
        generate_exception(ctx, EXCP_OVERFLOW);
        gen_set_label(l1);
        gen_store_gpr(t0, rt);
    }

    tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);

    tcg_gen_movi_tl(cpu_gpr[rt], uimm);
#endif
}
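/*
 * The ADDI/DADDI paths above detect signed overflow with the usual bit
 * trick: after t0 = t1 + uimm, the value ~(t1 ^ uimm) & (t0 ^ uimm) is
 * negative exactly when both operands had the same sign and the result's
 * sign differs, so the TCG_COND_GE branch skips the EXCP_OVERFLOW
 * exception in the non-overflow case.
 */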
/* Logic with immediate operand */
static void gen_logic_imm(DisasContext *ctx, uint32_t opc,
                          int rt, int rs, int16_t imm)
{
    target_ulong uimm;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }
    uimm = (uint16_t)imm;

    if (likely(rs != 0))
        tcg_gen_andi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
    else
        tcg_gen_movi_tl(cpu_gpr[rt], 0);

    if (rs != 0)
        tcg_gen_ori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
    else
        tcg_gen_movi_tl(cpu_gpr[rt], uimm);

    if (likely(rs != 0))
        tcg_gen_xori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
    else
        tcg_gen_movi_tl(cpu_gpr[rt], uimm);

    if (rs != 0 && (ctx->insn_flags & ISA_MIPS32R6)) {
        tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], imm << 16);
        tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rt], imm << 16);
    }
}
/* Set on less than with immediate operand */
static void gen_slt_imm(DisasContext *ctx, uint32_t opc,
                        int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);

    tcg_gen_setcondi_tl(TCG_COND_LT, cpu_gpr[rt], t0, uimm);

    tcg_gen_setcondi_tl(TCG_COND_LTU, cpu_gpr[rt], t0, uimm);
}
/* Shifts with immediate operand */
static void gen_shift_imm(DisasContext *ctx, uint32_t opc,
                          int rt, int rs, int16_t imm)
{
    target_ulong uimm = ((uint16_t)imm) & 0x1f;
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);

    tcg_gen_shli_tl(t0, t0, uimm);
    tcg_gen_ext32s_tl(cpu_gpr[rt], t0);

    tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);

    tcg_gen_ext32u_tl(t0, t0);
    tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);

    tcg_gen_ext32s_tl(cpu_gpr[rt], t0);

    {
        TCGv_i32 t1 = tcg_temp_new_i32();

        tcg_gen_trunc_tl_i32(t1, t0);
        tcg_gen_rotri_i32(t1, t1, uimm);
        tcg_gen_ext_i32_tl(cpu_gpr[rt], t1);
        tcg_temp_free_i32(t1);
    }
    tcg_gen_ext32s_tl(cpu_gpr[rt], t0);

#if defined(TARGET_MIPS64)
    tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm);

    tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);

    tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);

    tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm);

    tcg_gen_mov_tl(cpu_gpr[rt], t0);

    tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm + 32);

    tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm + 32);

    tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm + 32);

    tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm + 32);
#endif
}
static void gen_arith(DisasContext *ctx, uint32_t opc,
                      int rd, int rs, int rt)
{
    if (rd == 0 && opc != OPC_ADD && opc != OPC_SUB
       && opc != OPC_DADD && opc != OPC_DSUB) {
        /* If no destination, treat it as a NOP.
           For add & sub, we must generate the overflow exception when needed. */
        return;
    }

    {
        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();
        TCGLabel *l1 = gen_new_label();

        gen_load_gpr(t1, rs);
        gen_load_gpr(t2, rt);
        tcg_gen_add_tl(t0, t1, t2);
        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_xor_tl(t1, t1, t2);
        tcg_gen_xor_tl(t2, t0, t2);
        tcg_gen_andc_tl(t1, t2, t1);
        tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
        /* operands of same sign, result different sign */
        generate_exception(ctx, EXCP_OVERFLOW);
        gen_set_label(l1);
        gen_store_gpr(t0, rd);
    }

    if (rs != 0 && rt != 0) {
        tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    } else if (rs == 0 && rt != 0) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
    } else if (rs != 0 && rt == 0) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }

    {
        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();
        TCGLabel *l1 = gen_new_label();

        gen_load_gpr(t1, rs);
        gen_load_gpr(t2, rt);
        tcg_gen_sub_tl(t0, t1, t2);
        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_xor_tl(t2, t1, t2);
        tcg_gen_xor_tl(t1, t0, t1);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
        /* operands of different sign, first operand and result different sign */
        generate_exception(ctx, EXCP_OVERFLOW);
        gen_set_label(l1);
        gen_store_gpr(t0, rd);
    }

    if (rs != 0 && rt != 0) {
        tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    } else if (rs == 0 && rt != 0) {
        tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    } else if (rs != 0 && rt == 0) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }

#if defined(TARGET_MIPS64)
    {
        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();
        TCGLabel *l1 = gen_new_label();

        gen_load_gpr(t1, rs);
        gen_load_gpr(t2, rt);
        tcg_gen_add_tl(t0, t1, t2);
        tcg_gen_xor_tl(t1, t1, t2);
        tcg_gen_xor_tl(t2, t0, t2);
        tcg_gen_andc_tl(t1, t2, t1);
        tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
        /* operands of same sign, result different sign */
        generate_exception(ctx, EXCP_OVERFLOW);
        gen_set_label(l1);
        gen_store_gpr(t0, rd);
    }

    if (rs != 0 && rt != 0) {
        tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
    } else if (rs == 0 && rt != 0) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
    } else if (rs != 0 && rt == 0) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }

    {
        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();
        TCGLabel *l1 = gen_new_label();

        gen_load_gpr(t1, rs);
        gen_load_gpr(t2, rt);
        tcg_gen_sub_tl(t0, t1, t2);
        tcg_gen_xor_tl(t2, t1, t2);
        tcg_gen_xor_tl(t1, t0, t1);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
        /* operands of different sign, first operand and result different sign */
        generate_exception(ctx, EXCP_OVERFLOW);
        gen_set_label(l1);
        gen_store_gpr(t0, rd);
    }

    if (rs != 0 && rt != 0) {
        tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
    } else if (rs == 0 && rt != 0) {
        tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
    } else if (rs != 0 && rt == 0) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
#endif

    if (likely(rs != 0 && rt != 0)) {
        tcg_gen_mul_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
}
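/*
 * The rs/rt == 0 special cases above (and in the other generators) exist
 * because this translator never uses cpu_gpr[0] as a live TCG value:
 * reads of $zero are folded to constants and writes are dropped, so each
 * operand combination is emitted with the cheapest equivalent TCG op.
 */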
/* Conditional move */
static void gen_cond_move(DisasContext *ctx, uint32_t opc,
                          int rd, int rs, int rt)
{
    TCGv t0, t1, t2;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    t1 = tcg_const_tl(0);
    t2 = tcg_temp_new();
    gen_load_gpr(t2, rs);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rd], t0, t1, t2, cpu_gpr[rd]);

    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_gpr[rd], t0, t1, t2, cpu_gpr[rd]);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rd], t0, t1, t2, t1);

    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_gpr[rd], t0, t1, t2, t1);
}
static void gen_logic(DisasContext *ctx, uint32_t opc,
                      int rd, int rs, int rt)
{
    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    if (likely(rs != 0 && rt != 0)) {
        tcg_gen_and_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }

    if (rs != 0 && rt != 0) {
        tcg_gen_nor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
    } else if (rs == 0 && rt != 0) {
        tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rt]);
    } else if (rs != 0 && rt == 0) {
        tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], ~((target_ulong)0));
    }

    if (likely(rs != 0 && rt != 0)) {
        tcg_gen_or_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
    } else if (rs == 0 && rt != 0) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
    } else if (rs != 0 && rt == 0) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }

    if (likely(rs != 0 && rt != 0)) {
        tcg_gen_xor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
    } else if (rs == 0 && rt != 0) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
    } else if (rs != 0 && rt == 0) {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    }
}
/* Set on lower than */
static void gen_slt(DisasContext *ctx, uint32_t opc,
                    int rd, int rs, int rt)
{
    TCGv t0, t1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    tcg_gen_setcond_tl(TCG_COND_LT, cpu_gpr[rd], t0, t1);

    tcg_gen_setcond_tl(TCG_COND_LTU, cpu_gpr[rd], t0, t1);
}
static void gen_shift(DisasContext *ctx, uint32_t opc,
                      int rd, int rs, int rt)
{
    TCGv t0, t1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    tcg_gen_andi_tl(t0, t0, 0x1f);
    tcg_gen_shl_tl(t0, t1, t0);
    tcg_gen_ext32s_tl(cpu_gpr[rd], t0);

    tcg_gen_andi_tl(t0, t0, 0x1f);
    tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);

    tcg_gen_ext32u_tl(t1, t1);
    tcg_gen_andi_tl(t0, t0, 0x1f);
    tcg_gen_shr_tl(t0, t1, t0);
    tcg_gen_ext32s_tl(cpu_gpr[rd], t0);

    {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();

        tcg_gen_trunc_tl_i32(t2, t0);
        tcg_gen_trunc_tl_i32(t3, t1);
        tcg_gen_andi_i32(t2, t2, 0x1f);
        tcg_gen_rotr_i32(t2, t3, t2);
        tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }

#if defined(TARGET_MIPS64)
    tcg_gen_andi_tl(t0, t0, 0x3f);
    tcg_gen_shl_tl(cpu_gpr[rd], t1, t0);

    tcg_gen_andi_tl(t0, t0, 0x3f);
    tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);

    tcg_gen_andi_tl(t0, t0, 0x3f);
    tcg_gen_shr_tl(cpu_gpr[rd], t1, t0);

    tcg_gen_andi_tl(t0, t0, 0x3f);
    tcg_gen_rotr_tl(cpu_gpr[rd], t1, t0);
#endif
}
/* Arithmetic on HI/LO registers */
static void gen_HILO(DisasContext *ctx, uint32_t opc, int acc, int reg)
{
    if (reg == 0 && (opc == OPC_MFHI || opc == OPC_MFLO)) {
        return;
    }

#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_tl(cpu_gpr[reg], cpu_HI[acc]);
#endif
    tcg_gen_mov_tl(cpu_gpr[reg], cpu_HI[acc]);

#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_tl(cpu_gpr[reg], cpu_LO[acc]);
#endif
    tcg_gen_mov_tl(cpu_gpr[reg], cpu_LO[acc]);

#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_tl(cpu_HI[acc], cpu_gpr[reg]);
#endif
    tcg_gen_mov_tl(cpu_HI[acc], cpu_gpr[reg]);

    tcg_gen_movi_tl(cpu_HI[acc], 0);

#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_tl(cpu_LO[acc], cpu_gpr[reg]);
#endif
    tcg_gen_mov_tl(cpu_LO[acc], cpu_gpr[reg]);

    tcg_gen_movi_tl(cpu_LO[acc], 0);
}
static inline void gen_r6_ld(target_long addr, int reg, int memidx,
                             TCGMemOp memop)
{
    TCGv t0 = tcg_const_tl(addr);
    tcg_gen_qemu_ld_tl(t0, t0, memidx, memop);
    gen_store_gpr(t0, reg);
    tcg_temp_free(t0);
}
static inline void gen_pcrel(DisasContext *ctx, int opc, target_ulong pc,
                             int rs)
{
    target_long offset;
    target_long addr;

    switch (MASK_OPC_PCREL_TOP2BITS(opc)) {
        offset = sextract32(ctx->opcode << 2, 0, 21);
        addr = addr_add(ctx, pc, offset);
        tcg_gen_movi_tl(cpu_gpr[rs], addr);

        offset = sextract32(ctx->opcode << 2, 0, 21);
        addr = addr_add(ctx, pc, offset);
        gen_r6_ld(addr, rs, ctx->mem_idx, MO_TESL);

#if defined(TARGET_MIPS64)
        offset = sextract32(ctx->opcode << 2, 0, 21);
        addr = addr_add(ctx, pc, offset);
        gen_r6_ld(addr, rs, ctx->mem_idx, MO_TEUL);
#endif

        switch (MASK_OPC_PCREL_TOP5BITS(opc)) {
            offset = sextract32(ctx->opcode, 0, 16) << 16;
            addr = addr_add(ctx, pc, offset);
            tcg_gen_movi_tl(cpu_gpr[rs], addr);

            offset = sextract32(ctx->opcode, 0, 16) << 16;
            addr = ~0xFFFF & addr_add(ctx, pc, offset);
            tcg_gen_movi_tl(cpu_gpr[rs], addr);

#if defined(TARGET_MIPS64)
        case R6_OPC_LDPC: /* bits 16 and 17 are part of immediate */
        case R6_OPC_LDPC + (1 << 16):
        case R6_OPC_LDPC + (2 << 16):
        case R6_OPC_LDPC + (3 << 16):
            offset = sextract32(ctx->opcode << 3, 0, 21);
            addr = addr_add(ctx, (pc & ~0x7), offset);
            gen_r6_ld(addr, rs, ctx->mem_idx, MO_TEQ);
            break;
#endif
        default:
            MIPS_INVAL("OPC_PCREL");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
    }
}
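/*
 * The ~0xFFFF mask in the second movi path above is what distinguishes the
 * aligned variant (R6's ALUIPC) from plain AUIPC: both add a 16-bit
 * immediate shifted left by 16 to the PC, but the aligned form additionally
 * clears the low 16 bits so the result is 64K-aligned.
 */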
static void gen_r6_muldiv(DisasContext *ctx, int opc, int rd, int rs, int rt)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    {
        TCGv t2 = tcg_temp_new();
        TCGv t3 = tcg_temp_new();
        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_ext32s_tl(t1, t1);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_tl(t2, t2, t3);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_tl(t2, t2, t3);
        tcg_gen_movi_tl(t3, 0);
        tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    }

    {
        TCGv t2 = tcg_temp_new();
        TCGv t3 = tcg_temp_new();
        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_ext32s_tl(t1, t1);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_tl(t2, t2, t3);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_tl(t2, t2, t3);
        tcg_gen_movi_tl(t3, 0);
        tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    }

    {
        TCGv t2 = tcg_const_tl(0);
        TCGv t3 = tcg_const_tl(1);
        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
        tcg_gen_divu_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    }

    {
        TCGv t2 = tcg_const_tl(0);
        TCGv t3 = tcg_const_tl(1);
        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
        tcg_gen_remu_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
    }

    {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t2, t0);
        tcg_gen_trunc_tl_i32(t3, t1);
        tcg_gen_mul_i32(t2, t2, t3);
        tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }

    {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t2, t0);
        tcg_gen_trunc_tl_i32(t3, t1);
        tcg_gen_muls2_i32(t2, t3, t2, t3);
        tcg_gen_ext_i32_tl(cpu_gpr[rd], t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }

    {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t2, t0);
        tcg_gen_trunc_tl_i32(t3, t1);
        tcg_gen_mul_i32(t2, t2, t3);
        tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }

    {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t2, t0);
        tcg_gen_trunc_tl_i32(t3, t1);
        tcg_gen_mulu2_i32(t2, t3, t2, t3);
        tcg_gen_ext_i32_tl(cpu_gpr[rd], t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }

#if defined(TARGET_MIPS64)
    {
        TCGv t2 = tcg_temp_new();
        TCGv t3 = tcg_temp_new();
        tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, -1LL << 63);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1LL);
        tcg_gen_and_tl(t2, t2, t3);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_tl(t2, t2, t3);
        tcg_gen_movi_tl(t3, 0);
        tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_tl(cpu_gpr[rd], t0, t1);
    }

    {
        TCGv t2 = tcg_temp_new();
        TCGv t3 = tcg_temp_new();
        tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, -1LL << 63);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1LL);
        tcg_gen_and_tl(t2, t2, t3);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_tl(t2, t2, t3);
        tcg_gen_movi_tl(t3, 0);
        tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);
    }

    {
        TCGv t2 = tcg_const_tl(0);
        TCGv t3 = tcg_const_tl(1);
        tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
        tcg_gen_divu_i64(cpu_gpr[rd], t0, t1);
    }

    {
        TCGv t2 = tcg_const_tl(0);
        TCGv t3 = tcg_const_tl(1);
        tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
        tcg_gen_remu_i64(cpu_gpr[rd], t0, t1);
    }

    tcg_gen_mul_i64(cpu_gpr[rd], t0, t1);

    {
        TCGv t2 = tcg_temp_new();
        tcg_gen_muls2_i64(t2, cpu_gpr[rd], t0, t1);
    }

    tcg_gen_mul_i64(cpu_gpr[rd], t0, t1);

    {
        TCGv t2 = tcg_temp_new();
        tcg_gen_mulu2_i64(t2, cpu_gpr[rd], t0, t1);
    }
#endif

    MIPS_INVAL("r6 mul/div");
    generate_exception_end(ctx, EXCP_RI);
}
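/*
 * Note on the divide sequences above: the generated code first detects a
 * zero divisor or the INT_MIN / -1 case (folded into t2) and uses movcond
 * to substitute a harmless divisor, so the host's integer divide can never
 * fault.  MIPS leaves the result of such divisions unpredictable, so
 * whatever value ends up in rd is architecturally acceptable.
 */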
static void gen_muldiv(DisasContext *ctx, uint32_t opc,
                       int acc, int rs, int rt)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    {
        TCGv t2 = tcg_temp_new();
        TCGv t3 = tcg_temp_new();
        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_ext32s_tl(t1, t1);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_tl(t2, t2, t3);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_tl(t2, t2, t3);
        tcg_gen_movi_tl(t3, 0);
        tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_tl(cpu_LO[acc], t0, t1);
        tcg_gen_rem_tl(cpu_HI[acc], t0, t1);
        tcg_gen_ext32s_tl(cpu_LO[acc], cpu_LO[acc]);
        tcg_gen_ext32s_tl(cpu_HI[acc], cpu_HI[acc]);
    }

    {
        TCGv t2 = tcg_const_tl(0);
        TCGv t3 = tcg_const_tl(1);
        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
        tcg_gen_divu_tl(cpu_LO[acc], t0, t1);
        tcg_gen_remu_tl(cpu_HI[acc], t0, t1);
        tcg_gen_ext32s_tl(cpu_LO[acc], cpu_LO[acc]);
        tcg_gen_ext32s_tl(cpu_HI[acc], cpu_HI[acc]);
    }

    {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t2, t0);
        tcg_gen_trunc_tl_i32(t3, t1);
        tcg_gen_muls2_i32(t2, t3, t2, t3);
        tcg_gen_ext_i32_tl(cpu_LO[acc], t2);
        tcg_gen_ext_i32_tl(cpu_HI[acc], t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }

    {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t2, t0);
        tcg_gen_trunc_tl_i32(t3, t1);
        tcg_gen_mulu2_i32(t2, t3, t2, t3);
        tcg_gen_ext_i32_tl(cpu_LO[acc], t2);
        tcg_gen_ext_i32_tl(cpu_HI[acc], t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }

#if defined(TARGET_MIPS64)
    {
        TCGv t2 = tcg_temp_new();
        TCGv t3 = tcg_temp_new();
        tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, -1LL << 63);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1LL);
        tcg_gen_and_tl(t2, t2, t3);
        tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_tl(t2, t2, t3);
        tcg_gen_movi_tl(t3, 0);
        tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_tl(cpu_LO[acc], t0, t1);
        tcg_gen_rem_tl(cpu_HI[acc], t0, t1);
    }

    {
        TCGv t2 = tcg_const_tl(0);
        TCGv t3 = tcg_const_tl(1);
        tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
        tcg_gen_divu_i64(cpu_LO[acc], t0, t1);
        tcg_gen_remu_i64(cpu_HI[acc], t0, t1);
    }

    tcg_gen_muls2_i64(cpu_LO[acc], cpu_HI[acc], t0, t1);

    tcg_gen_mulu2_i64(cpu_LO[acc], cpu_HI[acc], t0, t1);
#endif

    {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();

        tcg_gen_ext_tl_i64(t2, t0);
        tcg_gen_ext_tl_i64(t3, t1);
        tcg_gen_mul_i64(t2, t2, t3);
        tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
        tcg_gen_add_i64(t2, t2, t3);
        tcg_temp_free_i64(t3);
        gen_move_low32(cpu_LO[acc], t2);
        gen_move_high32(cpu_HI[acc], t2);
        tcg_temp_free_i64(t2);
    }

    {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();

        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_extu_tl_i64(t2, t0);
        tcg_gen_extu_tl_i64(t3, t1);
        tcg_gen_mul_i64(t2, t2, t3);
        tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
        tcg_gen_add_i64(t2, t2, t3);
        tcg_temp_free_i64(t3);
        gen_move_low32(cpu_LO[acc], t2);
        gen_move_high32(cpu_HI[acc], t2);
        tcg_temp_free_i64(t2);
    }

    {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();

        tcg_gen_ext_tl_i64(t2, t0);
        tcg_gen_ext_tl_i64(t3, t1);
        tcg_gen_mul_i64(t2, t2, t3);
        tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
        tcg_gen_sub_i64(t2, t3, t2);
        tcg_temp_free_i64(t3);
        gen_move_low32(cpu_LO[acc], t2);
        gen_move_high32(cpu_HI[acc], t2);
        tcg_temp_free_i64(t2);
    }

    {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();

        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_extu_tl_i64(t2, t0);
        tcg_gen_extu_tl_i64(t3, t1);
        tcg_gen_mul_i64(t2, t2, t3);
        tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
        tcg_gen_sub_i64(t2, t3, t2);
        tcg_temp_free_i64(t3);
        gen_move_low32(cpu_LO[acc], t2);
        gen_move_high32(cpu_HI[acc], t2);
        tcg_temp_free_i64(t2);
    }

    MIPS_INVAL("mul/div");
    generate_exception_end(ctx, EXCP_RI);
}
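/*
 * The multiply-accumulate blocks above widen both operands to 64 bits,
 * rebuild the current 64-bit accumulator with
 * tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]), add or subtract
 * the product, and then split the result back into LO/HI with
 * gen_move_low32()/gen_move_high32().
 */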
static void gen_mul_vr54xx (DisasContext *ctx, uint32_t opc,
                            int rd, int rs, int rt)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    switch (opc) {
    case OPC_VR54XX_MULS:
        gen_helper_muls(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MULSU:
        gen_helper_mulsu(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MACC:
        gen_helper_macc(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MACCU:
        gen_helper_maccu(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MSAC:
        gen_helper_msac(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MSACU:
        gen_helper_msacu(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MULHI:
        gen_helper_mulhi(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MULHIU:
        gen_helper_mulhiu(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MULSHI:
        gen_helper_mulshi(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MULSHIU:
        gen_helper_mulshiu(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MACCHI:
        gen_helper_macchi(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MACCHIU:
        gen_helper_macchiu(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MSACHI:
        gen_helper_msachi(t0, cpu_env, t0, t1);
        break;
    case OPC_VR54XX_MSACHIU:
        gen_helper_msachiu(t0, cpu_env, t0, t1);
        break;
    default:
        MIPS_INVAL("mul vr54xx");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
    gen_store_gpr(t0, rd);
}
static void gen_cl (DisasContext *ctx, uint32_t opc,
                    int rd, int rs)
{
    TCGv t0;

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);

    gen_helper_clo(cpu_gpr[rd], t0);

    gen_helper_clz(cpu_gpr[rd], t0);

#if defined(TARGET_MIPS64)
    gen_helper_dclo(cpu_gpr[rd], t0);

    gen_helper_dclz(cpu_gpr[rd], t0);
#endif
}
/* Godson integer instructions */
static void gen_loongson_integer(DisasContext *ctx, uint32_t opc,
                                 int rd, int rs, int rt)
{
    TCGv t0, t1;

    case OPC_MULTU_G_2E:
    case OPC_MULTU_G_2F:
#if defined(TARGET_MIPS64)
    case OPC_DMULT_G_2E:
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2E:
    case OPC_DMULTU_G_2F:
#endif
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();

        t0 = tcg_temp_local_new();
        t1 = tcg_temp_local_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);

    case OPC_MULTU_G_2E:
    case OPC_MULTU_G_2F:
        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);

        {
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            TCGLabel *l3 = gen_new_label();
            tcg_gen_ext32s_tl(t0, t0);
            tcg_gen_ext32s_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
            tcg_gen_div_tl(cpu_gpr[rd], t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        }

        {
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_divu_tl(cpu_gpr[rd], t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        }

        {
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            TCGLabel *l3 = gen_new_label();
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        }

        {
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_remu_tl(cpu_gpr[rd], t0, t1);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        }

#if defined(TARGET_MIPS64)
    case OPC_DMULT_G_2E:
    case OPC_DMULT_G_2F:
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);

    case OPC_DMULTU_G_2E:
    case OPC_DMULTU_G_2F:
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);

        {
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            TCGLabel *l3 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
            tcg_gen_div_tl(cpu_gpr[rd], t0, t1);
        }

    case OPC_DDIVU_G_2E:
    case OPC_DDIVU_G_2F:
        {
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_divu_tl(cpu_gpr[rd], t0, t1);
        }

        {
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            TCGLabel *l3 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);
        }

    case OPC_DMODU_G_2E:
    case OPC_DMODU_G_2F:
        {
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
            tcg_gen_remu_tl(cpu_gpr[rd], t0, t1);
        }
#endif
}
3853 /* Loongson multimedia instructions */
3854 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3856 uint32_t opc
, shift_max
;
3859 opc
= MASK_LMI(ctx
->opcode
);
3865 t0
= tcg_temp_local_new_i64();
3866 t1
= tcg_temp_local_new_i64();
3869 t0
= tcg_temp_new_i64();
3870 t1
= tcg_temp_new_i64();
3874 gen_load_fpr64(ctx
, t0
, rs
);
3875 gen_load_fpr64(ctx
, t1
, rt
);
#define LMI_HELPER(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
#define LMI_HELPER_1(UP, LO) \
    case OPC_##UP: gen_helper_##LO(t0, t0); break
#define LMI_DIRECT(UP, LO, OP) \
    case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break

    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);

    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);

    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);

    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);

    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);

    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);

    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);

    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);

    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);

    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(PANDN, pandn, andc);
    LMI_DIRECT(OR_CP2, or, or);
        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);

        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);

        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);

        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);

        /* Make sure shift count isn't TCG undefined behaviour.  */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);

        tcg_gen_shl_i64(t0, t0, t1);

        /* Since SRA is UndefinedResult without sign-extended inputs,
           we can treat SRA and DSRA the same.  */
        tcg_gen_sar_i64(t0, t0, t1);

        /* We want to shift in zeros for SRL; zero-extend first.  */
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_shr_i64(t0, t0, t1);

        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);

        /* Shifts larger than MAX produce zero.  */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGLabel *lab = gen_new_label();

        tcg_gen_mov_i64(t2, t0);
        tcg_gen_add_i64(t0, t1, t2);
        if (opc == OPC_ADD_CP2) {
            tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_xor_i64(t1, t1, t2);
        tcg_gen_xor_i64(t2, t2, t0);
        tcg_gen_andc_i64(t1, t2, t1);
        tcg_temp_free_i64(t2);
        tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
        generate_exception(ctx, EXCP_OVERFLOW);

        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGLabel *lab = gen_new_label();

        tcg_gen_mov_i64(t2, t0);
        tcg_gen_sub_i64(t0, t1, t2);
        if (opc == OPC_SUB_CP2) {
            tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_xor_i64(t1, t1, t2);
        tcg_gen_xor_i64(t2, t2, t0);
        tcg_gen_and_i64(t1, t1, t2);
        tcg_temp_free_i64(t2);
        tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
        generate_exception(ctx, EXCP_OVERFLOW);

        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);
        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field?  */
        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr64(ctx, t0, rd);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
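/* Traps: generate the conditional trap instructions (TEQ, TNE, TLT, TGE and
   their unsigned/immediate forms).  When both operands are statically known
   (register compared with itself, or $zero against an immediate) the trap is
   resolved at translation time; otherwise a conditional branch around the
   trap exception is emitted. */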
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
        case OPC_TEQ:   /* rs == rs           */
        case OPC_TEQI:  /* r0 == 0            */
        case OPC_TGE:   /* rs >= rs           */
        case OPC_TGEI:  /* r0 >= 0            */
        case OPC_TGEU:  /* rs >= rs unsigned  */
        case OPC_TGEIU: /* r0 >= 0  unsigned  */
            generate_exception_end(ctx, EXCP_TRAP);
        case OPC_TLT:   /* rs < rs            */
        case OPC_TLTI:  /* r0 < 0             */
        case OPC_TLTU:  /* rs < rs unsigned   */
        case OPC_TLTIU: /* r0 < 0  unsigned   */
        case OPC_TNE:   /* rs != rs           */
        case OPC_TNEI:  /* r0 != 0            */
            /* Never trap: treat as NOP. */

        TCGLabel *l1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
        generate_exception(ctx, EXCP_TRAP);
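/* Direct translation-block chaining: use_goto_tb() allows a goto_tb link only
   when we are not single-stepping and, in system mode, when the branch target
   lies in the same guest page as the current TB.  Otherwise gen_goto_tb()
   stores the new PC and exits the TB, raising a debug exception first if
   single-stepping. */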
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
    if (unlikely(ctx->singlestep_enabled)) {
#ifndef CONFIG_USER_ONLY
    return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);

static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int insn_bytes, int rs, int rt, int32_t offset,
                                int delayslot_size)
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->pc);
        generate_exception_end(ctx, EXCP_RI);

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        btgt = ctx->pc + insn_bytes + offset;
        /* Compare to zero */
            gen_load_gpr(t0, rs);
        btgt = ctx->pc + insn_bytes + offset;
#if defined(TARGET_MIPS64)
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->pc + insn_bytes + offset;
        /* Jump to immediate */
        btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            /* Always take and link */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:     /* rx != rx        */
        case OPC_BGTZ:    /* 0 > 0           */
        case OPC_BLTZ:    /* 0 < 0           */
        case OPC_BLTZAL:  /* 0 < 0           */
            /* Handle as an unconditional branch to get correct delay
               slot checking.  */
            btgt = ctx->pc + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely    */
        case OPC_BLTZL:   /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BX;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);

            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
#if defined(TARGET_MIPS64)
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BC;
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BL;
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);

    ctx->btarget = btgt;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;

        int post_delay = insn_bytes + delayslot_size;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);

        tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);

    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
        if (lsb + msb > 31) {
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1U << (msb + 1)) - 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        if (lsb + msb > 63) {
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_gpr(t0, rt);
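/* Byte/halfword reordering and sign-extension operations: WSBH, SEB and SEH,
   plus DSBH and DSHD on MIPS64.  A zero destination register is treated as a
   NOP. */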
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
    /* If no destination, treat it as a NOP. */

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);

        TCGv t1 = tcg_temp_new();
        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);

        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        TCGv t1 = tcg_temp_new();
        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);

        TCGv t1 = tcg_temp_new();
        tcg_gen_shri_tl(t1, t0, 16);
        tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
        tcg_gen_shli_tl(t0, t0, 16);
        tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_shri_tl(t1, t0, 32);
        tcg_gen_shli_tl(t0, t0, 32);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);

        MIPS_INVAL("bsfhl");
        generate_exception_end(ctx, EXCP_RI);
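/* LSA/DLSA: rd = (rs << (imm2 + 1)) + rt, with the 32-bit LSA form
   sign-extending the result. */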
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
                    int imm2)
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
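/* ALIGN/DALIGN: concatenate rs and rt and extract a register-width field at
   byte offset bp; a byte position of zero degenerates to a plain move of rt. */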
static void gen_align(DisasContext *ctx, int opc, int rd, int rs, int rt,
                      int bp)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        tcg_gen_mov_tl(cpu_gpr[rd], t0);

        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t1, rs);
            TCGv_i64 t2 = tcg_temp_new_i64();
            tcg_gen_concat_tl_i64(t2, t1, t0);
            tcg_gen_shri_i64(t2, t2, 8 * (4 - bp));
            gen_move_low32(cpu_gpr[rd], t2);
            tcg_temp_free_i64(t2);
#if defined(TARGET_MIPS64)
            tcg_gen_shli_tl(t0, t0, 8 * bp);
            tcg_gen_shri_tl(t1, t1, 8 * (8 - bp));
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        gen_helper_bitswap(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dbitswap(cpu_gpr[rd], t0);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
    tcg_gen_shri_i64(t0, t0, 32);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
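/* CP0_CHECK bails out to the cp0_unimplemented label when the guarded
   feature (hflag, ASE or configuration bit) is not present on the current
   CPU model. */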
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)

static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);

            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                             ctx->CP0_LLAddr_shift);
            CP0_CHECK(ctx->mrp);
            gen_helper_mfhc0_maar(arg, cpu_env);
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
            goto cp0_unimplemented;
        goto cp0_unimplemented;

    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);

    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    tcg_gen_movi_tl(arg, 0);
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);

            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            /* LLAddr is read-only (the only exception is bit 0 if LLB is
               supported); the CP0_LLAddr_rw_bitmask does not seem to be
               relevant for modern MIPS cores supporting MTHC0, therefore
               treating MTHC0 to LLAddr as NOP. */
            CP0_CHECK(ctx->mrp);
            gen_helper_mthc0_maar(cpu_env, arg);
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
            goto cp0_unimplemented;
        goto cp0_unimplemented;

    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn, reg, sel);
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
        tcg_gen_movi_tl(arg, ~0);
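/* MFC0: dispatch on (reg, sel) to the corresponding CP0 state field or
   helper; unhandled selectors fall through to gen_mfc0_unimplemented(). */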
static void gen_mfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

        check_insn(ctx, ISA_MIPS32);
4959 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4963 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4964 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4968 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4969 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4973 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4974 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4979 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
4983 goto cp0_unimplemented
;
4989 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
4990 gen_helper_mfc0_random(arg
, cpu_env
);
4994 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4995 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4999 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5000 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5004 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5005 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5009 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5010 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5014 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5015 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5019 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5020 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5021 rn
= "VPEScheFBack";
5024 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5025 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5029 goto cp0_unimplemented
;
5036 TCGv_i64 tmp
= tcg_temp_new_i64();
5037 tcg_gen_ld_i64(tmp
, cpu_env
,
5038 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5039 #if defined(TARGET_MIPS64)
5041 /* Move RI/XI fields to bits 31:30 */
5042 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5043 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5046 gen_move_low32(arg
, tmp
);
5047 tcg_temp_free_i64(tmp
);
5052 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5053 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5057 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5058 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5062 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5063 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5067 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5068 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5072 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5073 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5077 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5078 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5082 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5083 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5087 goto cp0_unimplemented
;
5094 TCGv_i64 tmp
= tcg_temp_new_i64();
5095 tcg_gen_ld_i64(tmp
, cpu_env
,
5096 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5097 #if defined(TARGET_MIPS64)
5099 /* Move RI/XI fields to bits 31:30 */
5100 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5101 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5104 gen_move_low32(arg
, tmp
);
5105 tcg_temp_free_i64(tmp
);
5111 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5112 rn
= "GlobalNumber";
5115 goto cp0_unimplemented
;
5121 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5122 tcg_gen_ext32s_tl(arg
, arg
);
5126 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5127 rn
= "ContextConfig";
5128 goto cp0_unimplemented
;
5131 CP0_CHECK(ctx
->ulri
);
5132 tcg_gen_ld32s_tl(arg
, cpu_env
,
5133 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5137 goto cp0_unimplemented
;
5143 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5147 check_insn(ctx
, ISA_MIPS32R2
);
5148 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5152 goto cp0_unimplemented
;
5158 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5162 check_insn(ctx
, ISA_MIPS32R2
);
5163 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5167 check_insn(ctx
, ISA_MIPS32R2
);
5168 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5172 check_insn(ctx
, ISA_MIPS32R2
);
5173 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5177 check_insn(ctx
, ISA_MIPS32R2
);
5178 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5182 check_insn(ctx
, ISA_MIPS32R2
);
5183 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5187 goto cp0_unimplemented
;
5193 check_insn(ctx
, ISA_MIPS32R2
);
5194 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5198 goto cp0_unimplemented
;
5204 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5205 tcg_gen_ext32s_tl(arg
, arg
);
5210 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5215 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5219 goto cp0_unimplemented
;
5225 /* Mark as an IO operation because we read the time. */
5226 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5229 gen_helper_mfc0_count(arg
, cpu_env
);
5230 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5233 /* Break the TB to be able to take timer interrupts immediately
5234 after reading count. */
5235 ctx
->bstate
= BS_STOP
;
5238 /* 6,7 are implementation dependent */
5240 goto cp0_unimplemented
;
5246 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5247 tcg_gen_ext32s_tl(arg
, arg
);
5251 goto cp0_unimplemented
;
5257 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5260 /* 6,7 are implementation dependent */
5262 goto cp0_unimplemented
;
5268 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5272 check_insn(ctx
, ISA_MIPS32R2
);
5273 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5277 check_insn(ctx
, ISA_MIPS32R2
);
5278 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5282 check_insn(ctx
, ISA_MIPS32R2
);
5283 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5287 goto cp0_unimplemented
;
5293 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5297 goto cp0_unimplemented
;
5303 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5304 tcg_gen_ext32s_tl(arg
, arg
);
5308 goto cp0_unimplemented
;
5314 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5318 check_insn(ctx
, ISA_MIPS32R2
);
5319 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5323 check_insn(ctx
, ISA_MIPS32R2
);
5324 CP0_CHECK(ctx
->cmgcr
);
5325 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5326 tcg_gen_ext32s_tl(arg
, arg
);
5330 goto cp0_unimplemented
;
5336 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5340 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5344 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5348 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5352 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5356 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5359 /* 6,7 are implementation dependent */
5361 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5365 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5369 goto cp0_unimplemented
;
5375 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5379 CP0_CHECK(ctx
->mrp
);
5380 gen_helper_mfc0_maar(arg
, cpu_env
);
5384 CP0_CHECK(ctx
->mrp
);
5385 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5389 goto cp0_unimplemented
;
5395 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5399 goto cp0_unimplemented
;
5405 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5409 goto cp0_unimplemented
;
5415 #if defined(TARGET_MIPS64)
5416 check_insn(ctx
, ISA_MIPS3
);
5417 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5418 tcg_gen_ext32s_tl(arg
, arg
);
5423 goto cp0_unimplemented
;
5427 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5428 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5431 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5435 goto cp0_unimplemented
;
5439 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5440 rn
= "'Diagnostic"; /* implementation dependent */
5445 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5449 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5450 rn
= "TraceControl";
5453 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5454 rn
= "TraceControl2";
5457 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5458 rn
= "UserTraceData";
5461 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5465 goto cp0_unimplemented
;
5472 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5473 tcg_gen_ext32s_tl(arg
, arg
);
5477 goto cp0_unimplemented
;
5483 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5484 rn
= "Performance0";
5487 // gen_helper_mfc0_performance1(arg);
5488 rn
= "Performance1";
5491 // gen_helper_mfc0_performance2(arg);
5492 rn
= "Performance2";
5495 // gen_helper_mfc0_performance3(arg);
5496 rn
= "Performance3";
5499 // gen_helper_mfc0_performance4(arg);
5500 rn
= "Performance4";
5503 // gen_helper_mfc0_performance5(arg);
5504 rn
= "Performance5";
5507 // gen_helper_mfc0_performance6(arg);
5508 rn
= "Performance6";
5511 // gen_helper_mfc0_performance7(arg);
5512 rn
= "Performance7";
5515 goto cp0_unimplemented
;
5521 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5525 goto cp0_unimplemented
;
5531 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5535 goto cp0_unimplemented
;
5545 TCGv_i64 tmp
= tcg_temp_new_i64();
5546 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5547 gen_move_low32(arg
, tmp
);
5548 tcg_temp_free_i64(tmp
);
5556 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5560 goto cp0_unimplemented
;
5569 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5576 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5580 goto cp0_unimplemented
;
5586 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5587 tcg_gen_ext32s_tl(arg
, arg
);
5591 goto cp0_unimplemented
;
5598 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5602 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5603 tcg_gen_ld_tl(arg
, cpu_env
,
5604 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5605 tcg_gen_ext32s_tl(arg
, arg
);
5609 goto cp0_unimplemented
;
5613 goto cp0_unimplemented
;
5615 (void)rn
; /* avoid a compiler warning */
5616 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5620 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5621 gen_mfc0_unimplemented(ctx
, arg
);
static void gen_mtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

        check_insn(ctx, ISA_MIPS32);

    if (ctx->tb->cflags & CF_USE_ICOUNT) {
5639 gen_helper_mtc0_index(cpu_env
, arg
);
5643 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5644 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5648 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5653 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5663 goto cp0_unimplemented
;
5673 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5674 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5678 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5679 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5683 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5684 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5688 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5689 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5693 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5694 tcg_gen_st_tl(arg
, cpu_env
,
5695 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5699 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5700 tcg_gen_st_tl(arg
, cpu_env
,
5701 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5702 rn
= "VPEScheFBack";
5705 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5706 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5710 goto cp0_unimplemented
;
5716 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5720 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5721 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5725 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5726 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5730 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5731 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5735 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5736 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5740 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5741 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5745 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5746 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5750 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5751 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5755 goto cp0_unimplemented
;
5761 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5767 rn
= "GlobalNumber";
5770 goto cp0_unimplemented
;
5776 gen_helper_mtc0_context(cpu_env
, arg
);
5780 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5781 rn
= "ContextConfig";
5782 goto cp0_unimplemented
;
5785 CP0_CHECK(ctx
->ulri
);
5786 tcg_gen_st_tl(arg
, cpu_env
,
5787 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5791 goto cp0_unimplemented
;
5797 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5801 check_insn(ctx
, ISA_MIPS32R2
);
5802 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5804 ctx
->bstate
= BS_STOP
;
5807 goto cp0_unimplemented
;
5813 gen_helper_mtc0_wired(cpu_env
, arg
);
5817 check_insn(ctx
, ISA_MIPS32R2
);
5818 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5822 check_insn(ctx
, ISA_MIPS32R2
);
5823 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5827 check_insn(ctx
, ISA_MIPS32R2
);
5828 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5832 check_insn(ctx
, ISA_MIPS32R2
);
5833 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5837 check_insn(ctx
, ISA_MIPS32R2
);
5838 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5842 goto cp0_unimplemented
;
5848 check_insn(ctx
, ISA_MIPS32R2
);
5849 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5850 ctx
->bstate
= BS_STOP
;
5854 goto cp0_unimplemented
;
5872 goto cp0_unimplemented
;
5878 gen_helper_mtc0_count(cpu_env
, arg
);
5881 /* 6,7 are implementation dependent */
5883 goto cp0_unimplemented
;
5889 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5893 goto cp0_unimplemented
;
5899 gen_helper_mtc0_compare(cpu_env
, arg
);
5902 /* 6,7 are implementation dependent */
5904 goto cp0_unimplemented
;
5910 save_cpu_state(ctx
, 1);
5911 gen_helper_mtc0_status(cpu_env
, arg
);
5912 /* BS_STOP isn't good enough here, hflags may have changed. */
5913 gen_save_pc(ctx
->pc
+ 4);
5914 ctx
->bstate
= BS_EXCP
;
5918 check_insn(ctx
, ISA_MIPS32R2
);
5919 gen_helper_mtc0_intctl(cpu_env
, arg
);
5920 /* Stop translation as we may have switched the execution mode */
5921 ctx
->bstate
= BS_STOP
;
5925 check_insn(ctx
, ISA_MIPS32R2
);
5926 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5927 /* Stop translation as we may have switched the execution mode */
5928 ctx
->bstate
= BS_STOP
;
5932 check_insn(ctx
, ISA_MIPS32R2
);
5933 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5934 /* Stop translation as we may have switched the execution mode */
5935 ctx
->bstate
= BS_STOP
;
5939 goto cp0_unimplemented
;
5945 save_cpu_state(ctx
, 1);
5946 gen_helper_mtc0_cause(cpu_env
, arg
);
5950 goto cp0_unimplemented
;
5956 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5960 goto cp0_unimplemented
;
5970 check_insn(ctx
, ISA_MIPS32R2
);
5971 gen_helper_mtc0_ebase(cpu_env
, arg
);
5975 goto cp0_unimplemented
;
5981 gen_helper_mtc0_config0(cpu_env
, arg
);
5983 /* Stop translation as we may have switched the execution mode */
5984 ctx
->bstate
= BS_STOP
;
5987 /* ignored, read only */
5991 gen_helper_mtc0_config2(cpu_env
, arg
);
5993 /* Stop translation as we may have switched the execution mode */
5994 ctx
->bstate
= BS_STOP
;
5997 gen_helper_mtc0_config3(cpu_env
, arg
);
5999 /* Stop translation as we may have switched the execution mode */
6000 ctx
->bstate
= BS_STOP
;
6003 gen_helper_mtc0_config4(cpu_env
, arg
);
6005 ctx
->bstate
= BS_STOP
;
6008 gen_helper_mtc0_config5(cpu_env
, arg
);
6010 /* Stop translation as we may have switched the execution mode */
6011 ctx
->bstate
= BS_STOP
;
6013 /* 6,7 are implementation dependent */
6023 rn
= "Invalid config selector";
6024 goto cp0_unimplemented
;
6030 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6034 CP0_CHECK(ctx
->mrp
);
6035 gen_helper_mtc0_maar(cpu_env
, arg
);
6039 CP0_CHECK(ctx
->mrp
);
6040 gen_helper_mtc0_maari(cpu_env
, arg
);
6044 goto cp0_unimplemented
;
6050 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6054 goto cp0_unimplemented
;
6060 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6064 goto cp0_unimplemented
;
6070 #if defined(TARGET_MIPS64)
6071 check_insn(ctx
, ISA_MIPS3
);
6072 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6077 goto cp0_unimplemented
;
6081 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6082 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6085 gen_helper_mtc0_framemask(cpu_env
, arg
);
6089 goto cp0_unimplemented
;
6094 rn
= "Diagnostic"; /* implementation dependent */
6099 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6100 /* BS_STOP isn't good enough here, hflags may have changed. */
6101 gen_save_pc(ctx
->pc
+ 4);
6102 ctx
->bstate
= BS_EXCP
;
6106 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6107 rn
= "TraceControl";
6108 /* Stop translation as we may have switched the execution mode */
6109 ctx
->bstate
= BS_STOP
;
6112 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6113 rn
= "TraceControl2";
6114 /* Stop translation as we may have switched the execution mode */
6115 ctx
->bstate
= BS_STOP
;
6118 /* Stop translation as we may have switched the execution mode */
6119 ctx
->bstate
= BS_STOP
;
6120 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6121 rn
= "UserTraceData";
6122 /* Stop translation as we may have switched the execution mode */
6123 ctx
->bstate
= BS_STOP
;
6126 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6127 /* Stop translation as we may have switched the execution mode */
6128 ctx
->bstate
= BS_STOP
;
6132 goto cp0_unimplemented
;
6139 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6143 goto cp0_unimplemented
;
6149 gen_helper_mtc0_performance0(cpu_env
, arg
);
6150 rn
= "Performance0";
6153 // gen_helper_mtc0_performance1(arg);
6154 rn
= "Performance1";
6157 // gen_helper_mtc0_performance2(arg);
6158 rn
= "Performance2";
6161 // gen_helper_mtc0_performance3(arg);
6162 rn
= "Performance3";
6165 // gen_helper_mtc0_performance4(arg);
6166 rn
= "Performance4";
6169 // gen_helper_mtc0_performance5(arg);
6170 rn
= "Performance5";
6173 // gen_helper_mtc0_performance6(arg);
6174 rn
= "Performance6";
6177 // gen_helper_mtc0_performance7(arg);
6178 rn
= "Performance7";
6181 goto cp0_unimplemented
;
6187 gen_helper_mtc0_errctl(cpu_env
, arg
);
6188 ctx
->bstate
= BS_STOP
;
6192 goto cp0_unimplemented
;
6202 goto cp0_unimplemented
;
6211 gen_helper_mtc0_taglo(cpu_env
, arg
);
6218 gen_helper_mtc0_datalo(cpu_env
, arg
);
6222 goto cp0_unimplemented
;
6231 gen_helper_mtc0_taghi(cpu_env
, arg
);
6238 gen_helper_mtc0_datahi(cpu_env
, arg
);
6243 goto cp0_unimplemented
;
6249 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6253 goto cp0_unimplemented
;
6260 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6264 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6265 tcg_gen_st_tl(arg
, cpu_env
,
6266 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6270 goto cp0_unimplemented
;
6272 /* Stop translation as we may have switched the execution mode */
6273 ctx
->bstate
= BS_STOP
;
6276 goto cp0_unimplemented
;
6278 (void)rn
; /* avoid a compiler warning */
6279 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6280 /* For simplicity assume that all writes can cause interrupts. */
6281 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6283 ctx
->bstate
= BS_STOP
;
6288 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
#if defined(TARGET_MIPS64)
static void gen_dmfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

        check_insn(ctx, ISA_MIPS64);
6303 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6307 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6308 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6312 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6313 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6317 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6318 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6323 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6327 goto cp0_unimplemented
;
6333 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6334 gen_helper_mfc0_random(arg
, cpu_env
);
6338 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6339 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6343 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6344 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6348 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6349 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6353 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6354 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6358 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6359 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6363 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6364 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6365 rn
= "VPEScheFBack";
6368 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6369 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6373 goto cp0_unimplemented
;
6379 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6383 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6384 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6388 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6389 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6393 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6394 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6398 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6399 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6403 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6404 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6408 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6409 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6413 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6414 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6418 goto cp0_unimplemented
;
6424 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6429 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6430 rn
= "GlobalNumber";
6433 goto cp0_unimplemented
;
6439 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6443 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6444 rn
= "ContextConfig";
6445 goto cp0_unimplemented
;
6448 CP0_CHECK(ctx
->ulri
);
6449 tcg_gen_ld_tl(arg
, cpu_env
,
6450 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6454 goto cp0_unimplemented
;
6460 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6464 check_insn(ctx
, ISA_MIPS32R2
);
6465 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6469 goto cp0_unimplemented
;
6475 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6479 check_insn(ctx
, ISA_MIPS32R2
);
6480 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6484 check_insn(ctx
, ISA_MIPS32R2
);
6485 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6489 check_insn(ctx
, ISA_MIPS32R2
);
6490 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6494 check_insn(ctx
, ISA_MIPS32R2
);
6495 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6499 check_insn(ctx
, ISA_MIPS32R2
);
6500 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6504 goto cp0_unimplemented
;
6510 check_insn(ctx
, ISA_MIPS32R2
);
6511 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6515 goto cp0_unimplemented
;
6521 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6526 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6531 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6535 goto cp0_unimplemented
;
6541 /* Mark as an IO operation because we read the time. */
6542 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6545 gen_helper_mfc0_count(arg
, cpu_env
);
6546 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6549 /* Break the TB to be able to take timer interrupts immediately
6550 after reading count. */
6551 ctx
->bstate
= BS_STOP
;
6554 /* 6,7 are implementation dependent */
6556 goto cp0_unimplemented
;
6562 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6566 goto cp0_unimplemented
;
6572 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6575 /* 6,7 are implementation dependent */
6577 goto cp0_unimplemented
;
6583 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6587 check_insn(ctx
, ISA_MIPS32R2
);
6588 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6592 check_insn(ctx
, ISA_MIPS32R2
);
6593 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6597 check_insn(ctx
, ISA_MIPS32R2
);
6598 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6602 goto cp0_unimplemented
;
6608 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6612 goto cp0_unimplemented
;
6618 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6622 goto cp0_unimplemented
;
6628 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6632 check_insn(ctx
, ISA_MIPS32R2
);
6633 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6637 check_insn(ctx
, ISA_MIPS32R2
);
6638 CP0_CHECK(ctx
->cmgcr
);
6639 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6643 goto cp0_unimplemented
;
6649 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6653 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6657 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6661 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6665 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6669 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6672 /* 6,7 are implementation dependent */
6674 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6678 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6682 goto cp0_unimplemented
;
6688 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6692 CP0_CHECK(ctx
->mrp
);
6693 gen_helper_dmfc0_maar(arg
, cpu_env
);
6697 CP0_CHECK(ctx
->mrp
);
6698 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6702 goto cp0_unimplemented
;
6708 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6712 goto cp0_unimplemented
;
6718 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6722 goto cp0_unimplemented
;
6728 check_insn(ctx
, ISA_MIPS3
);
6729 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6733 goto cp0_unimplemented
;
6737 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6738 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6741 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6745 goto cp0_unimplemented
;
6749 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6750 rn
= "'Diagnostic"; /* implementation dependent */
6755 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6759 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6760 rn
= "TraceControl";
6763 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6764 rn
= "TraceControl2";
6767 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6768 rn
= "UserTraceData";
6771 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6775 goto cp0_unimplemented
;
6782 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6786 goto cp0_unimplemented
;
6792 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6793 rn
= "Performance0";
6796 // gen_helper_dmfc0_performance1(arg);
6797 rn
= "Performance1";
6800 // gen_helper_dmfc0_performance2(arg);
6801 rn
= "Performance2";
6804 // gen_helper_dmfc0_performance3(arg);
6805 rn
= "Performance3";
6808 // gen_helper_dmfc0_performance4(arg);
6809 rn
= "Performance4";
6812 // gen_helper_dmfc0_performance5(arg);
6813 rn
= "Performance5";
6816 // gen_helper_dmfc0_performance6(arg);
6817 rn
= "Performance6";
6820 // gen_helper_dmfc0_performance7(arg);
6821 rn
= "Performance7";
6824 goto cp0_unimplemented
;
6830 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6834 goto cp0_unimplemented
;
6841 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6845 goto cp0_unimplemented
;
6854 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6861 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6865 goto cp0_unimplemented
;
6874 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6881 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6885 goto cp0_unimplemented
;
6891 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6895 goto cp0_unimplemented
;
6902 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6906 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6907 tcg_gen_ld_tl(arg
, cpu_env
,
6908 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6912 goto cp0_unimplemented
;
6916 goto cp0_unimplemented
;
6918 (void)rn
; /* avoid a compiler warning */
6919 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6923 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6924 gen_mfc0_unimplemented(ctx
, arg
);
static void gen_dmtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

        check_insn(ctx, ISA_MIPS64);

    if (ctx->tb->cflags & CF_USE_ICOUNT) {
6942 gen_helper_mtc0_index(cpu_env
, arg
);
6946 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6947 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6951 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6956 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6966 goto cp0_unimplemented
;
6976 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6977 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6981 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6982 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6986 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6987 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6991 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6992 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6996 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6997 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7001 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7002 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7003 rn
= "VPEScheFBack";
7006 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7007 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7011 goto cp0_unimplemented
;
7017 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7021 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7022 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7026 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7027 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7031 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7032 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7036 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7037 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7041 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7042 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7046 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7047 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7051 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7052 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7056 goto cp0_unimplemented
;
7062 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7068 rn
= "GlobalNumber";
7071 goto cp0_unimplemented
;
7077 gen_helper_mtc0_context(cpu_env
, arg
);
7081 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7082 rn
= "ContextConfig";
7083 goto cp0_unimplemented
;
7086 CP0_CHECK(ctx
->ulri
);
7087 tcg_gen_st_tl(arg
, cpu_env
,
7088 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7092 goto cp0_unimplemented
;
7098 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7102 check_insn(ctx
, ISA_MIPS32R2
);
7103 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7107 goto cp0_unimplemented
;
7113 gen_helper_mtc0_wired(cpu_env
, arg
);
7117 check_insn(ctx
, ISA_MIPS32R2
);
7118 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7122 check_insn(ctx
, ISA_MIPS32R2
);
7123 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7127 check_insn(ctx
, ISA_MIPS32R2
);
7128 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7132 check_insn(ctx
, ISA_MIPS32R2
);
7133 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7137 check_insn(ctx
, ISA_MIPS32R2
);
7138 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7142 goto cp0_unimplemented
;
7148 check_insn(ctx
, ISA_MIPS32R2
);
7149 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7150 ctx
->bstate
= BS_STOP
;
7154 goto cp0_unimplemented
;
7172 goto cp0_unimplemented
;
7178 gen_helper_mtc0_count(cpu_env
, arg
);
7181 /* 6,7 are implementation dependent */
7183 goto cp0_unimplemented
;
7185 /* Stop translation as we may have switched the execution mode */
7186 ctx
->bstate
= BS_STOP
;
7191 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7195 goto cp0_unimplemented
;
7201 gen_helper_mtc0_compare(cpu_env
, arg
);
7204 /* 6,7 are implementation dependent */
7206 goto cp0_unimplemented
;
7208 /* Stop translation as we may have switched the execution mode */
7209 ctx
->bstate
= BS_STOP
;
7214 save_cpu_state(ctx
, 1);
7215 gen_helper_mtc0_status(cpu_env
, arg
);
7216 /* BS_STOP isn't good enough here, hflags may have changed. */
7217 gen_save_pc(ctx
->pc
+ 4);
7218 ctx
->bstate
= BS_EXCP
;
7222 check_insn(ctx
, ISA_MIPS32R2
);
7223 gen_helper_mtc0_intctl(cpu_env
, arg
);
7224 /* Stop translation as we may have switched the execution mode */
7225 ctx
->bstate
= BS_STOP
;
7229 check_insn(ctx
, ISA_MIPS32R2
);
7230 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7231 /* Stop translation as we may have switched the execution mode */
7232 ctx
->bstate
= BS_STOP
;
7236 check_insn(ctx
, ISA_MIPS32R2
);
7237 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7238 /* Stop translation as we may have switched the execution mode */
7239 ctx
->bstate
= BS_STOP
;
7243 goto cp0_unimplemented
;
7249 save_cpu_state(ctx
, 1);
7250 /* Mark as an IO operation because we may trigger a software
7252 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7255 gen_helper_mtc0_cause(cpu_env
, arg
);
7256 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7259 /* Stop translation as we may have triggered an intetrupt */
7260 ctx
->bstate
= BS_STOP
;
7264 goto cp0_unimplemented
;
7270 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7274 goto cp0_unimplemented
;
7284 check_insn(ctx
, ISA_MIPS32R2
);
7285 gen_helper_mtc0_ebase(cpu_env
, arg
);
7289 goto cp0_unimplemented
;
7295 gen_helper_mtc0_config0(cpu_env
, arg
);
7297 /* Stop translation as we may have switched the execution mode */
7298 ctx
->bstate
= BS_STOP
;
7301 /* ignored, read only */
7305 gen_helper_mtc0_config2(cpu_env
, arg
);
7307 /* Stop translation as we may have switched the execution mode */
7308 ctx
->bstate
= BS_STOP
;
7311 gen_helper_mtc0_config3(cpu_env
, arg
);
7313 /* Stop translation as we may have switched the execution mode */
7314 ctx
->bstate
= BS_STOP
;
7317 /* currently ignored */
7321 gen_helper_mtc0_config5(cpu_env
, arg
);
7323 /* Stop translation as we may have switched the execution mode */
7324 ctx
->bstate
= BS_STOP
;
7326 /* 6,7 are implementation dependent */
7328 rn
= "Invalid config selector";
7329 goto cp0_unimplemented
;
7335 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7339 CP0_CHECK(ctx
->mrp
);
7340 gen_helper_mtc0_maar(cpu_env
, arg
);
7344 CP0_CHECK(ctx
->mrp
);
7345 gen_helper_mtc0_maari(cpu_env
, arg
);
7349 goto cp0_unimplemented
;
7355 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7359 goto cp0_unimplemented
;
7365 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7369 goto cp0_unimplemented
;
7375 check_insn(ctx
, ISA_MIPS3
);
7376 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7380 goto cp0_unimplemented
;
7384 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7385 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7388 gen_helper_mtc0_framemask(cpu_env
, arg
);
7392 goto cp0_unimplemented
;
7397 rn
= "Diagnostic"; /* implementation dependent */
7402 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7403 /* BS_STOP isn't good enough here, hflags may have changed. */
7404 gen_save_pc(ctx
->pc
+ 4);
7405 ctx
->bstate
= BS_EXCP
;
7409 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7410 /* Stop translation as we may have switched the execution mode */
7411 ctx
->bstate
= BS_STOP
;
7412 rn
= "TraceControl";
7415 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7416 /* Stop translation as we may have switched the execution mode */
7417 ctx
->bstate
= BS_STOP
;
7418 rn
= "TraceControl2";
7421 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7422 /* Stop translation as we may have switched the execution mode */
7423 ctx
->bstate
= BS_STOP
;
7424 rn
= "UserTraceData";
7427 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7428 /* Stop translation as we may have switched the execution mode */
7429 ctx
->bstate
= BS_STOP
;
7433 goto cp0_unimplemented
;
7440 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7444 goto cp0_unimplemented
;
7450 gen_helper_mtc0_performance0(cpu_env
, arg
);
7451 rn
= "Performance0";
7454 // gen_helper_mtc0_performance1(cpu_env, arg);
7455 rn
= "Performance1";
7458 // gen_helper_mtc0_performance2(cpu_env, arg);
7459 rn
= "Performance2";
7462 // gen_helper_mtc0_performance3(cpu_env, arg);
7463 rn
= "Performance3";
7466 // gen_helper_mtc0_performance4(cpu_env, arg);
7467 rn
= "Performance4";
7470 // gen_helper_mtc0_performance5(cpu_env, arg);
7471 rn
= "Performance5";
7474 // gen_helper_mtc0_performance6(cpu_env, arg);
7475 rn
= "Performance6";
7478 // gen_helper_mtc0_performance7(cpu_env, arg);
7479 rn
= "Performance7";
7482 goto cp0_unimplemented
;
7488 gen_helper_mtc0_errctl(cpu_env
, arg
);
7489 ctx
->bstate
= BS_STOP
;
7493 goto cp0_unimplemented
;
7503 goto cp0_unimplemented
;
7512 gen_helper_mtc0_taglo(cpu_env
, arg
);
7519 gen_helper_mtc0_datalo(cpu_env
, arg
);
7523 goto cp0_unimplemented
;
7532 gen_helper_mtc0_taghi(cpu_env
, arg
);
7539 gen_helper_mtc0_datahi(cpu_env
, arg
);
7544 goto cp0_unimplemented
;
7550 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7554 goto cp0_unimplemented
;
7561 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7565 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7566 tcg_gen_st_tl(arg
, cpu_env
,
7567 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7571 goto cp0_unimplemented
;
7573 /* Stop translation as we may have switched the execution mode */
7574 ctx
->bstate
= BS_STOP
;
7577 goto cp0_unimplemented
;
7579 (void)rn
; /* avoid a compiler warning */
7580 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7581 /* For simplicity assume that all writes can cause interrupts. */
7582 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7584 ctx
->bstate
= BS_STOP
;
7589 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
#endif /* TARGET_MIPS64 */
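/* MT ASE MFTR/MTTR: move a register belonging to another thread context (TC)
   to or from a GPR of the current TC.  Accesses that target a TC bound to a
   different VPE, or a TC number above the number of provisioned TCs, are
   treated as invalid (reads return -1). */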
static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
7611 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7614 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7624 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7627 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7630 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7633 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7636 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7639 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7642 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7645 gen_mfc0(ctx
, t0
, rt
, sel
);
7652 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7655 gen_mfc0(ctx
, t0
, rt
, sel
);
7661 gen_helper_mftc0_status(t0
, cpu_env
);
7664 gen_mfc0(ctx
, t0
, rt
, sel
);
7670 gen_helper_mftc0_cause(t0
, cpu_env
);
7680 gen_helper_mftc0_epc(t0
, cpu_env
);
7690 gen_helper_mftc0_ebase(t0
, cpu_env
);
7700 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7710 gen_helper_mftc0_debug(t0
, cpu_env
);
7713 gen_mfc0(ctx
, t0
, rt
, sel
);
7718 gen_mfc0(ctx
, t0
, rt
, sel
);
7720 } else switch (sel
) {
7721 /* GPR registers. */
7723 gen_helper_1e0i(mftgpr
, t0
, rt
);
7725 /* Auxiliary CPU registers */
7729 gen_helper_1e0i(mftlo
, t0
, 0);
7732 gen_helper_1e0i(mfthi
, t0
, 0);
7735 gen_helper_1e0i(mftacx
, t0
, 0);
7738 gen_helper_1e0i(mftlo
, t0
, 1);
7741 gen_helper_1e0i(mfthi
, t0
, 1);
7744 gen_helper_1e0i(mftacx
, t0
, 1);
7747 gen_helper_1e0i(mftlo
, t0
, 2);
7750 gen_helper_1e0i(mfthi
, t0
, 2);
7753 gen_helper_1e0i(mftacx
, t0
, 2);
7756 gen_helper_1e0i(mftlo
, t0
, 3);
7759 gen_helper_1e0i(mfthi
, t0
, 3);
7762 gen_helper_1e0i(mftacx
, t0
, 3);
7765 gen_helper_mftdsp(t0
, cpu_env
);
7771 /* Floating point (COP1). */
7773 /* XXX: For now we support only a single FPU context. */
7775 TCGv_i32 fp0
= tcg_temp_new_i32();
7777 gen_load_fpr32(ctx
, fp0
, rt
);
7778 tcg_gen_ext_i32_tl(t0
, fp0
);
7779 tcg_temp_free_i32(fp0
);
7781 TCGv_i32 fp0
= tcg_temp_new_i32();
7783 gen_load_fpr32h(ctx
, fp0
, rt
);
7784 tcg_gen_ext_i32_tl(t0
, fp0
);
7785 tcg_temp_free_i32(fp0
);
7789 /* XXX: For now we support only a single FPU context. */
7790 gen_helper_1e0i(cfc1
, t0
, rt
);
7792 /* COP2: Not implemented. */
7799 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7800 gen_store_gpr(t0
, rd
);
7806 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7807 generate_exception_end(ctx
, EXCP_RI
);
7810 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
7811 int u
, int sel
, int h
)
7813 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7814 TCGv t0
= tcg_temp_local_new();
7816 gen_load_gpr(t0
, rt
);
7817 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7818 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7819 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7821 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7822 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7829 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7832 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7842 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7845 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7848 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
7851 gen_helper_mttc0_tchalt(cpu_env
, t0
);
7854 gen_helper_mttc0_tccontext(cpu_env
, t0
);
7857 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
7860 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
7863 gen_mtc0(ctx
, t0
, rd
, sel
);
7870 gen_helper_mttc0_entryhi(cpu_env
, t0
);
7873 gen_mtc0(ctx
, t0
, rd
, sel
);
7879 gen_helper_mttc0_status(cpu_env
, t0
);
7882 gen_mtc0(ctx
, t0
, rd
, sel
);
7888 gen_helper_mttc0_cause(cpu_env
, t0
);
7898 gen_helper_mttc0_ebase(cpu_env
, t0
);
7908 gen_helper_mttc0_debug(cpu_env
, t0
);
7911 gen_mtc0(ctx
, t0
, rd
, sel
);
7916 gen_mtc0(ctx
, t0
, rd
, sel
);
7918 } else switch (sel
) {
7919 /* GPR registers. */
7921 gen_helper_0e1i(mttgpr
, t0
, rd
);
7923 /* Auxiliary CPU registers */
7927 gen_helper_0e1i(mttlo
, t0
, 0);
7930 gen_helper_0e1i(mtthi
, t0
, 0);
7933 gen_helper_0e1i(mttacx
, t0
, 0);
7936 gen_helper_0e1i(mttlo
, t0
, 1);
7939 gen_helper_0e1i(mtthi
, t0
, 1);
7942 gen_helper_0e1i(mttacx
, t0
, 1);
7945 gen_helper_0e1i(mttlo
, t0
, 2);
7948 gen_helper_0e1i(mtthi
, t0
, 2);
7951 gen_helper_0e1i(mttacx
, t0
, 2);
7954 gen_helper_0e1i(mttlo
, t0
, 3);
7957 gen_helper_0e1i(mtthi
, t0
, 3);
7960 gen_helper_0e1i(mttacx
, t0
, 3);
7963 gen_helper_mttdsp(cpu_env
, t0
);
7969 /* Floating point (COP1). */
7971 /* XXX: For now we support only a single FPU context. */
7973 TCGv_i32 fp0
= tcg_temp_new_i32();
7975 tcg_gen_trunc_tl_i32(fp0
, t0
);
7976 gen_store_fpr32(ctx
, fp0
, rd
);
7977 tcg_temp_free_i32(fp0
);
7979 TCGv_i32 fp0
= tcg_temp_new_i32();
7981 tcg_gen_trunc_tl_i32(fp0
, t0
);
7982 gen_store_fpr32h(ctx
, fp0
, rd
);
7983 tcg_temp_free_i32(fp0
);
7987 /* XXX: For now we support only a single FPU context. */
7989 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
7991 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
7992 tcg_temp_free_i32(fs_tmp
);
7994 /* Stop translation as we may have changed hflags */
7995 ctx
->bstate
= BS_STOP
;
7997 /* COP2: Not implemented. */
8004 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
8010 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
8011 generate_exception_end(ctx
, EXCP_RI
);
8014 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
8016 const char *opn
= "ldst";
8018 check_cp0_enabled(ctx
);
8025 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8030 TCGv t0
= tcg_temp_new();
8032 gen_load_gpr(t0
, rt
);
8033 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8038 #if defined(TARGET_MIPS64)
8040 check_insn(ctx
, ISA_MIPS3
);
8045 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8049 check_insn(ctx
, ISA_MIPS3
);
8051 TCGv t0
= tcg_temp_new();
8053 gen_load_gpr(t0
, rt
);
8054 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8066 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8072 TCGv t0
= tcg_temp_new();
8073 gen_load_gpr(t0
, rt
);
8074 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8080 check_insn(ctx
, ASE_MT
);
8085 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
8086 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8090 check_insn(ctx
, ASE_MT
);
8091 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
8092 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8097 if (!env
->tlb
->helper_tlbwi
)
8099 gen_helper_tlbwi(cpu_env
);
8104 if (!env
->tlb
->helper_tlbinv
) {
8107 gen_helper_tlbinv(cpu_env
);
8108 } /* treat as nop if TLBINV not supported */
8113 if (!env
->tlb
->helper_tlbinvf
) {
8116 gen_helper_tlbinvf(cpu_env
);
8117 } /* treat as nop if TLBINV not supported */
8121 if (!env
->tlb
->helper_tlbwr
)
8123 gen_helper_tlbwr(cpu_env
);
8127 if (!env
->tlb
->helper_tlbp
)
8129 gen_helper_tlbp(cpu_env
);
8133 if (!env
->tlb
->helper_tlbr
)
8135 gen_helper_tlbr(cpu_env
);
8137 case OPC_ERET
: /* OPC_ERETNC */
8138 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8139 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8142 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
8143 if (ctx
->opcode
& (1 << bit_shift
)) {
8146 check_insn(ctx
, ISA_MIPS32R5
);
8147 gen_helper_eretnc(cpu_env
);
8151 check_insn(ctx
, ISA_MIPS2
);
8152 gen_helper_eret(cpu_env
);
8154 ctx
->bstate
= BS_EXCP
;
8159 check_insn(ctx
, ISA_MIPS32
);
8160 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8161 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8164 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8166 generate_exception_end(ctx
, EXCP_RI
);
8168 gen_helper_deret(cpu_env
);
8169 ctx
->bstate
= BS_EXCP
;
8174 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8175 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8176 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8179 /* If we get an exception, we want to restart at next instruction */
8181 save_cpu_state(ctx
, 1);
8183 gen_helper_wait(cpu_env
);
8184 ctx
->bstate
= BS_EXCP
;
8189 generate_exception_end(ctx
, EXCP_RI
);
8192 (void)opn
; /* avoid a compiler warning */
8194 #endif /* !CONFIG_USER_ONLY */
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
{
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();

    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    if (cc != 0)
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);

    btarget = ctx->pc + 4 + offset;

    switch (op) {
    case OPC_BC1F:
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        goto not_likely;
    case OPC_BC1FL:
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        goto likely;
    case OPC_BC1T:
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        goto not_likely;
    case OPC_BC1TL:
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
    likely:
        ctx->hflags |= MIPS_HFLAG_BL;
        break;
    case OPC_BC1FANY2:
        {
            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
            tcg_gen_nand_i32(t0, t0, t1);
            tcg_temp_free_i32(t1);
            tcg_gen_andi_i32(t0, t0, 1);
            tcg_gen_extu_i32_tl(bcond, t0);
        }
        goto not_likely;
    case OPC_BC1TANY2:
        {
            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
            tcg_gen_or_i32(t0, t0, t1);
            tcg_temp_free_i32(t1);
            tcg_gen_andi_i32(t0, t0, 1);
            tcg_gen_extu_i32_tl(bcond, t0);
        }
        goto not_likely;
    case OPC_BC1FANY4:
        {
            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
            tcg_gen_and_i32(t0, t0, t1);
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
            tcg_gen_and_i32(t0, t0, t1);
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
            tcg_gen_nand_i32(t0, t0, t1);
            tcg_temp_free_i32(t1);
            tcg_gen_andi_i32(t0, t0, 1);
            tcg_gen_extu_i32_tl(bcond, t0);
        }
        goto not_likely;
    case OPC_BC1TANY4:
        {
            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
            tcg_gen_or_i32(t0, t0, t1);
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
            tcg_gen_or_i32(t0, t0, t1);
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
            tcg_gen_or_i32(t0, t0, t1);
            tcg_temp_free_i32(t1);
            tcg_gen_andi_i32(t0, t0, 1);
            tcg_gen_extu_i32_tl(bcond, t0);
        }
    not_likely:
        ctx->hflags |= MIPS_HFLAG_BC;
        break;
    default:
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;
 out:
    tcg_temp_free_i32(t0);
}
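/*
 * Annotation (added for clarity): the branches above test a coprocessor 1
 * condition code by shifting FCR31 right by get_fp_bit(cc) (defined
 * earlier in this file) and masking bit 0.  In the MIPS FCSR layout,
 * condition code 0 lives in bit 23 and condition codes 1..7 in bits
 * 25..31, i.e. roughly:
 *
 *     cc = 0  ->  FCR31 bit 23
 *     cc = n  ->  FCR31 bit 24 + n      (n = 1..7)
 *
 * The BC1ANY2/BC1ANY4 variants simply combine two or four consecutive
 * condition codes before the final mask.
 */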
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
{
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->pc);
#endif
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);

    btarget = addr_add(ctx, ctx->pc + 4, offset);

    switch (op) {
    case OPC_BC1EQZ:
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        break;
    case OPC_BC1NEZ:
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        break;
    default:
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    tcg_gen_trunc_i64_tl(bcond, t0);

    ctx->btarget = btarget;

    switch (delayslot_size) {
    case 2:
        ctx->hflags |= MIPS_HFLAG_BDS16;
        break;
    case 4:
        ctx->hflags |= MIPS_HFLAG_BDS32;
        break;
    }

out:
    tcg_temp_free_i64(t0);
}
8357 /* Coprocessor 1 (FPU) */
8359 #define FOP(func, fmt) (((fmt) << 21) | (func))
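/*
 * Worked example (annotation added for clarity): FOP() builds the key used
 * to dispatch COP1 arithmetic in gen_farith(): the format field occupies
 * instruction bits 25..21 and the function field bits 5..0.  Assuming the
 * usual MIPS format codes (FMT_S = 16, FMT_D = 17), this gives for example:
 *
 *     OPC_ADD_S = FOP(0, FMT_S) = (16 << 21) | 0 = 0x02000000
 *     OPC_ADD_D = FOP(0, FMT_D) = (17 << 21) | 0 = 0x02200000
 *     OPC_NEG_D = FOP(7, FMT_D) = (17 << 21) | 7 = 0x02200007
 *
 * so masking a COP1 instruction down to its fmt and func fields yields the
 * enum value directly.
 */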
8362 OPC_ADD_S
= FOP(0, FMT_S
),
8363 OPC_SUB_S
= FOP(1, FMT_S
),
8364 OPC_MUL_S
= FOP(2, FMT_S
),
8365 OPC_DIV_S
= FOP(3, FMT_S
),
8366 OPC_SQRT_S
= FOP(4, FMT_S
),
8367 OPC_ABS_S
= FOP(5, FMT_S
),
8368 OPC_MOV_S
= FOP(6, FMT_S
),
8369 OPC_NEG_S
= FOP(7, FMT_S
),
8370 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8371 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8372 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8373 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8374 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8375 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8376 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8377 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8378 OPC_SEL_S
= FOP(16, FMT_S
),
8379 OPC_MOVCF_S
= FOP(17, FMT_S
),
8380 OPC_MOVZ_S
= FOP(18, FMT_S
),
8381 OPC_MOVN_S
= FOP(19, FMT_S
),
8382 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8383 OPC_RECIP_S
= FOP(21, FMT_S
),
8384 OPC_RSQRT_S
= FOP(22, FMT_S
),
8385 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8386 OPC_MADDF_S
= FOP(24, FMT_S
),
8387 OPC_MSUBF_S
= FOP(25, FMT_S
),
8388 OPC_RINT_S
= FOP(26, FMT_S
),
8389 OPC_CLASS_S
= FOP(27, FMT_S
),
8390 OPC_MIN_S
= FOP(28, FMT_S
),
8391 OPC_RECIP2_S
= FOP(28, FMT_S
),
8392 OPC_MINA_S
= FOP(29, FMT_S
),
8393 OPC_RECIP1_S
= FOP(29, FMT_S
),
8394 OPC_MAX_S
= FOP(30, FMT_S
),
8395 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8396 OPC_MAXA_S
= FOP(31, FMT_S
),
8397 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8398 OPC_CVT_D_S
= FOP(33, FMT_S
),
8399 OPC_CVT_W_S
= FOP(36, FMT_S
),
8400 OPC_CVT_L_S
= FOP(37, FMT_S
),
8401 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8402 OPC_CMP_F_S
= FOP (48, FMT_S
),
8403 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8404 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8405 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8406 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8407 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8408 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8409 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8410 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8411 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8412 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8413 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8414 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8415 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8416 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8417 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8419 OPC_ADD_D
= FOP(0, FMT_D
),
8420 OPC_SUB_D
= FOP(1, FMT_D
),
8421 OPC_MUL_D
= FOP(2, FMT_D
),
8422 OPC_DIV_D
= FOP(3, FMT_D
),
8423 OPC_SQRT_D
= FOP(4, FMT_D
),
8424 OPC_ABS_D
= FOP(5, FMT_D
),
8425 OPC_MOV_D
= FOP(6, FMT_D
),
8426 OPC_NEG_D
= FOP(7, FMT_D
),
8427 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8428 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8429 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8430 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8431 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8432 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8433 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8434 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8435 OPC_SEL_D
= FOP(16, FMT_D
),
8436 OPC_MOVCF_D
= FOP(17, FMT_D
),
8437 OPC_MOVZ_D
= FOP(18, FMT_D
),
8438 OPC_MOVN_D
= FOP(19, FMT_D
),
8439 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8440 OPC_RECIP_D
= FOP(21, FMT_D
),
8441 OPC_RSQRT_D
= FOP(22, FMT_D
),
8442 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8443 OPC_MADDF_D
= FOP(24, FMT_D
),
8444 OPC_MSUBF_D
= FOP(25, FMT_D
),
8445 OPC_RINT_D
= FOP(26, FMT_D
),
8446 OPC_CLASS_D
= FOP(27, FMT_D
),
8447 OPC_MIN_D
= FOP(28, FMT_D
),
8448 OPC_RECIP2_D
= FOP(28, FMT_D
),
8449 OPC_MINA_D
= FOP(29, FMT_D
),
8450 OPC_RECIP1_D
= FOP(29, FMT_D
),
8451 OPC_MAX_D
= FOP(30, FMT_D
),
8452 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8453 OPC_MAXA_D
= FOP(31, FMT_D
),
8454 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8455 OPC_CVT_S_D
= FOP(32, FMT_D
),
8456 OPC_CVT_W_D
= FOP(36, FMT_D
),
8457 OPC_CVT_L_D
= FOP(37, FMT_D
),
8458 OPC_CMP_F_D
= FOP (48, FMT_D
),
8459 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8460 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8461 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8462 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8463 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8464 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8465 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8466 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8467 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8468 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8469 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8470 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8471 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8472 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8473 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8475 OPC_CVT_S_W
= FOP(32, FMT_W
),
8476 OPC_CVT_D_W
= FOP(33, FMT_W
),
8477 OPC_CVT_S_L
= FOP(32, FMT_L
),
8478 OPC_CVT_D_L
= FOP(33, FMT_L
),
8479 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8481 OPC_ADD_PS
= FOP(0, FMT_PS
),
8482 OPC_SUB_PS
= FOP(1, FMT_PS
),
8483 OPC_MUL_PS
= FOP(2, FMT_PS
),
8484 OPC_DIV_PS
= FOP(3, FMT_PS
),
8485 OPC_ABS_PS
= FOP(5, FMT_PS
),
8486 OPC_MOV_PS
= FOP(6, FMT_PS
),
8487 OPC_NEG_PS
= FOP(7, FMT_PS
),
8488 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8489 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8490 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8491 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8492 OPC_MULR_PS
= FOP(26, FMT_PS
),
8493 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8494 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8495 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8496 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8498 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8499 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8500 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8501 OPC_PLL_PS
= FOP(44, FMT_PS
),
8502 OPC_PLU_PS
= FOP(45, FMT_PS
),
8503 OPC_PUL_PS
= FOP(46, FMT_PS
),
8504 OPC_PUU_PS
= FOP(47, FMT_PS
),
8505 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8506 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8507 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8508 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8509 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8510 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8511 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8512 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8513 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8514 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8515 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8516 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8517 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8518 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8519 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8520 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8524 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8525 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8526 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8527 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8528 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8529 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8530 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8531 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8532 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8533 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8534 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8535 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8536 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8537 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8538 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8539 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8540 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8541 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8542 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8543 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8544 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8545 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8547 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8548 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8549 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8550 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8551 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8552 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8553 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8554 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8555 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8556 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8557 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8558 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8559 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8560 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8561 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8562 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8563 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8564 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8565 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8566 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8567 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8568 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
{
    TCGv t0 = tcg_temp_new();

    switch (opc) {
    case OPC_MFC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        }
        gen_store_gpr(t0, rt);
        break;
    case OPC_MTC1:
        gen_load_gpr(t0, rt);
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_CFC1:
        gen_helper_1e0i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        break;
    case OPC_CTC1:
        gen_load_gpr(t0, rt);
        save_cpu_state(ctx, 0);
        {
            TCGv_i32 fs_tmp = tcg_const_i32(fs);

            gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
            tcg_temp_free_i32(fs_tmp);
        }
        /* Stop translation as we may have changed hflags */
        ctx->bstate = BS_STOP;
        break;
#if defined(TARGET_MIPS64)
    case OPC_DMFC1:
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        break;
    case OPC_DMTC1:
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
        break;
#endif
    case OPC_MFHC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32h(ctx, fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        }
        gen_store_gpr(t0, rt);
        break;
    case OPC_MTHC1:
        gen_load_gpr(t0, rt);
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(ctx, fp0, fs);
            tcg_temp_free_i32(fp0);
        }
        break;
    default:
        MIPS_INVAL("cp1 move");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

 out:
    tcg_temp_free(t0);
}
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
{
    TCGLabel *l1;
    TCGCond cond;
    TCGv_i32 t0;

    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }

    if (tf)
        cond = TCG_COND_EQ;
    else
        cond = TCG_COND_NE;

    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    if (rs == 0) {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    } else {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    }
    gen_set_label(l1);
}
static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
                               int tf)
{
    int cond;
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    if (tf)
        cond = TCG_COND_EQ;
    else
        cond = TCG_COND_NE;

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    gen_set_label(l1);
    tcg_temp_free_i32(t0);
}
static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
{
    int cond;
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i64 fp0;
    TCGLabel *l1 = gen_new_label();

    if (tf)
        cond = TCG_COND_EQ;
    else
        cond = TCG_COND_NE;

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    gen_set_label(l1);
}
static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
                                int cc, int tf)
{
    int cond;
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    if (tf)
        cond = TCG_COND_EQ;
    else
        cond = TCG_COND_NE;

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    gen_set_label(l1);

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
    gen_set_label(l2);
}
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
{
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);

    switch (op1) {
    case OPC_SEL_S:
        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        break;
    case OPC_SELEQZ_S:
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        break;
    case OPC_SELNEZ_S:
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        break;
    default:
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);
}
static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
{
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);

    switch (op1) {
    case OPC_SEL_D:
        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        break;
    case OPC_SELEQZ_D:
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        break;
    case OPC_SELNEZ_D:
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        break;
    default:
        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
}
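/*
 * Annotation (added for clarity): the R6 select operations above are
 * implemented branch-free with movcond.  SEL.fmt uses bit 0 of the
 * destination register fd as the selector (fd.bit0 ? ft : fs), while
 * SELEQZ.fmt / SELNEZ.fmt test bit 0 of ft and produce either fs or the
 * zero constant t1.  The same tcg_gen_movcond_* pattern is used for both
 * the single- and double-precision variants.
 */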
8823 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8824 int ft
, int fs
, int fd
, int cc
)
8826 uint32_t func
= ctx
->opcode
& 0x3f;
8830 TCGv_i32 fp0
= tcg_temp_new_i32();
8831 TCGv_i32 fp1
= tcg_temp_new_i32();
8833 gen_load_fpr32(ctx
, fp0
, fs
);
8834 gen_load_fpr32(ctx
, fp1
, ft
);
8835 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8836 tcg_temp_free_i32(fp1
);
8837 gen_store_fpr32(ctx
, fp0
, fd
);
8838 tcg_temp_free_i32(fp0
);
8843 TCGv_i32 fp0
= tcg_temp_new_i32();
8844 TCGv_i32 fp1
= tcg_temp_new_i32();
8846 gen_load_fpr32(ctx
, fp0
, fs
);
8847 gen_load_fpr32(ctx
, fp1
, ft
);
8848 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8849 tcg_temp_free_i32(fp1
);
8850 gen_store_fpr32(ctx
, fp0
, fd
);
8851 tcg_temp_free_i32(fp0
);
8856 TCGv_i32 fp0
= tcg_temp_new_i32();
8857 TCGv_i32 fp1
= tcg_temp_new_i32();
8859 gen_load_fpr32(ctx
, fp0
, fs
);
8860 gen_load_fpr32(ctx
, fp1
, ft
);
8861 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8862 tcg_temp_free_i32(fp1
);
8863 gen_store_fpr32(ctx
, fp0
, fd
);
8864 tcg_temp_free_i32(fp0
);
8869 TCGv_i32 fp0
= tcg_temp_new_i32();
8870 TCGv_i32 fp1
= tcg_temp_new_i32();
8872 gen_load_fpr32(ctx
, fp0
, fs
);
8873 gen_load_fpr32(ctx
, fp1
, ft
);
8874 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8875 tcg_temp_free_i32(fp1
);
8876 gen_store_fpr32(ctx
, fp0
, fd
);
8877 tcg_temp_free_i32(fp0
);
8882 TCGv_i32 fp0
= tcg_temp_new_i32();
8884 gen_load_fpr32(ctx
, fp0
, fs
);
8885 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8886 gen_store_fpr32(ctx
, fp0
, fd
);
8887 tcg_temp_free_i32(fp0
);
8892 TCGv_i32 fp0
= tcg_temp_new_i32();
8894 gen_load_fpr32(ctx
, fp0
, fs
);
8896 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
8898 gen_helper_float_abs_s(fp0
, fp0
);
8900 gen_store_fpr32(ctx
, fp0
, fd
);
8901 tcg_temp_free_i32(fp0
);
8906 TCGv_i32 fp0
= tcg_temp_new_i32();
8908 gen_load_fpr32(ctx
, fp0
, fs
);
8909 gen_store_fpr32(ctx
, fp0
, fd
);
8910 tcg_temp_free_i32(fp0
);
8915 TCGv_i32 fp0
= tcg_temp_new_i32();
8917 gen_load_fpr32(ctx
, fp0
, fs
);
8919 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
8921 gen_helper_float_chs_s(fp0
, fp0
);
8923 gen_store_fpr32(ctx
, fp0
, fd
);
8924 tcg_temp_free_i32(fp0
);
8928 check_cp1_64bitmode(ctx
);
8930 TCGv_i32 fp32
= tcg_temp_new_i32();
8931 TCGv_i64 fp64
= tcg_temp_new_i64();
8933 gen_load_fpr32(ctx
, fp32
, fs
);
8935 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
8937 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
8939 tcg_temp_free_i32(fp32
);
8940 gen_store_fpr64(ctx
, fp64
, fd
);
8941 tcg_temp_free_i64(fp64
);
8945 check_cp1_64bitmode(ctx
);
8947 TCGv_i32 fp32
= tcg_temp_new_i32();
8948 TCGv_i64 fp64
= tcg_temp_new_i64();
8950 gen_load_fpr32(ctx
, fp32
, fs
);
8952 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
8954 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
8956 tcg_temp_free_i32(fp32
);
8957 gen_store_fpr64(ctx
, fp64
, fd
);
8958 tcg_temp_free_i64(fp64
);
8962 check_cp1_64bitmode(ctx
);
8964 TCGv_i32 fp32
= tcg_temp_new_i32();
8965 TCGv_i64 fp64
= tcg_temp_new_i64();
8967 gen_load_fpr32(ctx
, fp32
, fs
);
8969 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
8971 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
8973 tcg_temp_free_i32(fp32
);
8974 gen_store_fpr64(ctx
, fp64
, fd
);
8975 tcg_temp_free_i64(fp64
);
8979 check_cp1_64bitmode(ctx
);
8981 TCGv_i32 fp32
= tcg_temp_new_i32();
8982 TCGv_i64 fp64
= tcg_temp_new_i64();
8984 gen_load_fpr32(ctx
, fp32
, fs
);
8986 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
8988 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
8990 tcg_temp_free_i32(fp32
);
8991 gen_store_fpr64(ctx
, fp64
, fd
);
8992 tcg_temp_free_i64(fp64
);
8997 TCGv_i32 fp0
= tcg_temp_new_i32();
8999 gen_load_fpr32(ctx
, fp0
, fs
);
9001 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
9003 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
9005 gen_store_fpr32(ctx
, fp0
, fd
);
9006 tcg_temp_free_i32(fp0
);
9011 TCGv_i32 fp0
= tcg_temp_new_i32();
9013 gen_load_fpr32(ctx
, fp0
, fs
);
9015 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
9017 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
9019 gen_store_fpr32(ctx
, fp0
, fd
);
9020 tcg_temp_free_i32(fp0
);
9025 TCGv_i32 fp0
= tcg_temp_new_i32();
9027 gen_load_fpr32(ctx
, fp0
, fs
);
9029 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
9031 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
9033 gen_store_fpr32(ctx
, fp0
, fd
);
9034 tcg_temp_free_i32(fp0
);
9039 TCGv_i32 fp0
= tcg_temp_new_i32();
9041 gen_load_fpr32(ctx
, fp0
, fs
);
9043 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
9045 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
9047 gen_store_fpr32(ctx
, fp0
, fd
);
9048 tcg_temp_free_i32(fp0
);
9052 check_insn(ctx
, ISA_MIPS32R6
);
9053 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9056 check_insn(ctx
, ISA_MIPS32R6
);
9057 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9060 check_insn(ctx
, ISA_MIPS32R6
);
9061 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9064 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9065 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9068 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9070 TCGLabel
*l1
= gen_new_label();
9074 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9076 fp0
= tcg_temp_new_i32();
9077 gen_load_fpr32(ctx
, fp0
, fs
);
9078 gen_store_fpr32(ctx
, fp0
, fd
);
9079 tcg_temp_free_i32(fp0
);
9084 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9086 TCGLabel
*l1
= gen_new_label();
9090 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9091 fp0
= tcg_temp_new_i32();
9092 gen_load_fpr32(ctx
, fp0
, fs
);
9093 gen_store_fpr32(ctx
, fp0
, fd
);
9094 tcg_temp_free_i32(fp0
);
9101 TCGv_i32 fp0
= tcg_temp_new_i32();
9103 gen_load_fpr32(ctx
, fp0
, fs
);
9104 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9105 gen_store_fpr32(ctx
, fp0
, fd
);
9106 tcg_temp_free_i32(fp0
);
9111 TCGv_i32 fp0
= tcg_temp_new_i32();
9113 gen_load_fpr32(ctx
, fp0
, fs
);
9114 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9115 gen_store_fpr32(ctx
, fp0
, fd
);
9116 tcg_temp_free_i32(fp0
);
9120 check_insn(ctx
, ISA_MIPS32R6
);
9122 TCGv_i32 fp0
= tcg_temp_new_i32();
9123 TCGv_i32 fp1
= tcg_temp_new_i32();
9124 TCGv_i32 fp2
= tcg_temp_new_i32();
9125 gen_load_fpr32(ctx
, fp0
, fs
);
9126 gen_load_fpr32(ctx
, fp1
, ft
);
9127 gen_load_fpr32(ctx
, fp2
, fd
);
9128 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9129 gen_store_fpr32(ctx
, fp2
, fd
);
9130 tcg_temp_free_i32(fp2
);
9131 tcg_temp_free_i32(fp1
);
9132 tcg_temp_free_i32(fp0
);
9136 check_insn(ctx
, ISA_MIPS32R6
);
9138 TCGv_i32 fp0
= tcg_temp_new_i32();
9139 TCGv_i32 fp1
= tcg_temp_new_i32();
9140 TCGv_i32 fp2
= tcg_temp_new_i32();
9141 gen_load_fpr32(ctx
, fp0
, fs
);
9142 gen_load_fpr32(ctx
, fp1
, ft
);
9143 gen_load_fpr32(ctx
, fp2
, fd
);
9144 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9145 gen_store_fpr32(ctx
, fp2
, fd
);
9146 tcg_temp_free_i32(fp2
);
9147 tcg_temp_free_i32(fp1
);
9148 tcg_temp_free_i32(fp0
);
9152 check_insn(ctx
, ISA_MIPS32R6
);
9154 TCGv_i32 fp0
= tcg_temp_new_i32();
9155 gen_load_fpr32(ctx
, fp0
, fs
);
9156 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9157 gen_store_fpr32(ctx
, fp0
, fd
);
9158 tcg_temp_free_i32(fp0
);
9162 check_insn(ctx
, ISA_MIPS32R6
);
9164 TCGv_i32 fp0
= tcg_temp_new_i32();
9165 gen_load_fpr32(ctx
, fp0
, fs
);
9166 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
9167 gen_store_fpr32(ctx
, fp0
, fd
);
9168 tcg_temp_free_i32(fp0
);
9171 case OPC_MIN_S
: /* OPC_RECIP2_S */
9172 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9174 TCGv_i32 fp0
= tcg_temp_new_i32();
9175 TCGv_i32 fp1
= tcg_temp_new_i32();
9176 TCGv_i32 fp2
= tcg_temp_new_i32();
9177 gen_load_fpr32(ctx
, fp0
, fs
);
9178 gen_load_fpr32(ctx
, fp1
, ft
);
9179 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9180 gen_store_fpr32(ctx
, fp2
, fd
);
9181 tcg_temp_free_i32(fp2
);
9182 tcg_temp_free_i32(fp1
);
9183 tcg_temp_free_i32(fp0
);
9186 check_cp1_64bitmode(ctx
);
9188 TCGv_i32 fp0
= tcg_temp_new_i32();
9189 TCGv_i32 fp1
= tcg_temp_new_i32();
9191 gen_load_fpr32(ctx
, fp0
, fs
);
9192 gen_load_fpr32(ctx
, fp1
, ft
);
9193 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9194 tcg_temp_free_i32(fp1
);
9195 gen_store_fpr32(ctx
, fp0
, fd
);
9196 tcg_temp_free_i32(fp0
);
9200 case OPC_MINA_S
: /* OPC_RECIP1_S */
9201 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9203 TCGv_i32 fp0
= tcg_temp_new_i32();
9204 TCGv_i32 fp1
= tcg_temp_new_i32();
9205 TCGv_i32 fp2
= tcg_temp_new_i32();
9206 gen_load_fpr32(ctx
, fp0
, fs
);
9207 gen_load_fpr32(ctx
, fp1
, ft
);
9208 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9209 gen_store_fpr32(ctx
, fp2
, fd
);
9210 tcg_temp_free_i32(fp2
);
9211 tcg_temp_free_i32(fp1
);
9212 tcg_temp_free_i32(fp0
);
9215 check_cp1_64bitmode(ctx
);
9217 TCGv_i32 fp0
= tcg_temp_new_i32();
9219 gen_load_fpr32(ctx
, fp0
, fs
);
9220 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9221 gen_store_fpr32(ctx
, fp0
, fd
);
9222 tcg_temp_free_i32(fp0
);
9226 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9227 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9229 TCGv_i32 fp0
= tcg_temp_new_i32();
9230 TCGv_i32 fp1
= tcg_temp_new_i32();
9231 gen_load_fpr32(ctx
, fp0
, fs
);
9232 gen_load_fpr32(ctx
, fp1
, ft
);
9233 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9234 gen_store_fpr32(ctx
, fp1
, fd
);
9235 tcg_temp_free_i32(fp1
);
9236 tcg_temp_free_i32(fp0
);
9239 check_cp1_64bitmode(ctx
);
9241 TCGv_i32 fp0
= tcg_temp_new_i32();
9243 gen_load_fpr32(ctx
, fp0
, fs
);
9244 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9245 gen_store_fpr32(ctx
, fp0
, fd
);
9246 tcg_temp_free_i32(fp0
);
9250 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9251 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9253 TCGv_i32 fp0
= tcg_temp_new_i32();
9254 TCGv_i32 fp1
= tcg_temp_new_i32();
9255 gen_load_fpr32(ctx
, fp0
, fs
);
9256 gen_load_fpr32(ctx
, fp1
, ft
);
9257 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9258 gen_store_fpr32(ctx
, fp1
, fd
);
9259 tcg_temp_free_i32(fp1
);
9260 tcg_temp_free_i32(fp0
);
9263 check_cp1_64bitmode(ctx
);
9265 TCGv_i32 fp0
= tcg_temp_new_i32();
9266 TCGv_i32 fp1
= tcg_temp_new_i32();
9268 gen_load_fpr32(ctx
, fp0
, fs
);
9269 gen_load_fpr32(ctx
, fp1
, ft
);
9270 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9271 tcg_temp_free_i32(fp1
);
9272 gen_store_fpr32(ctx
, fp0
, fd
);
9273 tcg_temp_free_i32(fp0
);
9278 check_cp1_registers(ctx
, fd
);
9280 TCGv_i32 fp32
= tcg_temp_new_i32();
9281 TCGv_i64 fp64
= tcg_temp_new_i64();
9283 gen_load_fpr32(ctx
, fp32
, fs
);
9284 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9285 tcg_temp_free_i32(fp32
);
9286 gen_store_fpr64(ctx
, fp64
, fd
);
9287 tcg_temp_free_i64(fp64
);
9292 TCGv_i32 fp0
= tcg_temp_new_i32();
9294 gen_load_fpr32(ctx
, fp0
, fs
);
9296 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
9298 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
9300 gen_store_fpr32(ctx
, fp0
, fd
);
9301 tcg_temp_free_i32(fp0
);
9305 check_cp1_64bitmode(ctx
);
9307 TCGv_i32 fp32
= tcg_temp_new_i32();
9308 TCGv_i64 fp64
= tcg_temp_new_i64();
9310 gen_load_fpr32(ctx
, fp32
, fs
);
9312 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
9314 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
9316 tcg_temp_free_i32(fp32
);
9317 gen_store_fpr64(ctx
, fp64
, fd
);
9318 tcg_temp_free_i64(fp64
);
9324 TCGv_i64 fp64
= tcg_temp_new_i64();
9325 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9326 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9328 gen_load_fpr32(ctx
, fp32_0
, fs
);
9329 gen_load_fpr32(ctx
, fp32_1
, ft
);
9330 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9331 tcg_temp_free_i32(fp32_1
);
9332 tcg_temp_free_i32(fp32_0
);
9333 gen_store_fpr64(ctx
, fp64
, fd
);
9334 tcg_temp_free_i64(fp64
);
9346 case OPC_CMP_NGLE_S
:
9353 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9354 if (ctx
->opcode
& (1 << 6)) {
9355 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9357 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9361 check_cp1_registers(ctx
, fs
| ft
| fd
);
9363 TCGv_i64 fp0
= tcg_temp_new_i64();
9364 TCGv_i64 fp1
= tcg_temp_new_i64();
9366 gen_load_fpr64(ctx
, fp0
, fs
);
9367 gen_load_fpr64(ctx
, fp1
, ft
);
9368 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9369 tcg_temp_free_i64(fp1
);
9370 gen_store_fpr64(ctx
, fp0
, fd
);
9371 tcg_temp_free_i64(fp0
);
9375 check_cp1_registers(ctx
, fs
| ft
| fd
);
9377 TCGv_i64 fp0
= tcg_temp_new_i64();
9378 TCGv_i64 fp1
= tcg_temp_new_i64();
9380 gen_load_fpr64(ctx
, fp0
, fs
);
9381 gen_load_fpr64(ctx
, fp1
, ft
);
9382 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9383 tcg_temp_free_i64(fp1
);
9384 gen_store_fpr64(ctx
, fp0
, fd
);
9385 tcg_temp_free_i64(fp0
);
9389 check_cp1_registers(ctx
, fs
| ft
| fd
);
9391 TCGv_i64 fp0
= tcg_temp_new_i64();
9392 TCGv_i64 fp1
= tcg_temp_new_i64();
9394 gen_load_fpr64(ctx
, fp0
, fs
);
9395 gen_load_fpr64(ctx
, fp1
, ft
);
9396 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9397 tcg_temp_free_i64(fp1
);
9398 gen_store_fpr64(ctx
, fp0
, fd
);
9399 tcg_temp_free_i64(fp0
);
9403 check_cp1_registers(ctx
, fs
| ft
| fd
);
9405 TCGv_i64 fp0
= tcg_temp_new_i64();
9406 TCGv_i64 fp1
= tcg_temp_new_i64();
9408 gen_load_fpr64(ctx
, fp0
, fs
);
9409 gen_load_fpr64(ctx
, fp1
, ft
);
9410 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9411 tcg_temp_free_i64(fp1
);
9412 gen_store_fpr64(ctx
, fp0
, fd
);
9413 tcg_temp_free_i64(fp0
);
9417 check_cp1_registers(ctx
, fs
| fd
);
9419 TCGv_i64 fp0
= tcg_temp_new_i64();
9421 gen_load_fpr64(ctx
, fp0
, fs
);
9422 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9423 gen_store_fpr64(ctx
, fp0
, fd
);
9424 tcg_temp_free_i64(fp0
);
9428 check_cp1_registers(ctx
, fs
| fd
);
9430 TCGv_i64 fp0
= tcg_temp_new_i64();
9432 gen_load_fpr64(ctx
, fp0
, fs
);
9434 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
9436 gen_helper_float_abs_d(fp0
, fp0
);
9438 gen_store_fpr64(ctx
, fp0
, fd
);
9439 tcg_temp_free_i64(fp0
);
9443 check_cp1_registers(ctx
, fs
| fd
);
9445 TCGv_i64 fp0
= tcg_temp_new_i64();
9447 gen_load_fpr64(ctx
, fp0
, fs
);
9448 gen_store_fpr64(ctx
, fp0
, fd
);
9449 tcg_temp_free_i64(fp0
);
9453 check_cp1_registers(ctx
, fs
| fd
);
9455 TCGv_i64 fp0
= tcg_temp_new_i64();
9457 gen_load_fpr64(ctx
, fp0
, fs
);
9459 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
9461 gen_helper_float_chs_d(fp0
, fp0
);
9463 gen_store_fpr64(ctx
, fp0
, fd
);
9464 tcg_temp_free_i64(fp0
);
9468 check_cp1_64bitmode(ctx
);
9470 TCGv_i64 fp0
= tcg_temp_new_i64();
9472 gen_load_fpr64(ctx
, fp0
, fs
);
9474 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
9476 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
9478 gen_store_fpr64(ctx
, fp0
, fd
);
9479 tcg_temp_free_i64(fp0
);
9483 check_cp1_64bitmode(ctx
);
9485 TCGv_i64 fp0
= tcg_temp_new_i64();
9487 gen_load_fpr64(ctx
, fp0
, fs
);
9489 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
9491 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
9493 gen_store_fpr64(ctx
, fp0
, fd
);
9494 tcg_temp_free_i64(fp0
);
9498 check_cp1_64bitmode(ctx
);
9500 TCGv_i64 fp0
= tcg_temp_new_i64();
9502 gen_load_fpr64(ctx
, fp0
, fs
);
9504 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
9506 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
9508 gen_store_fpr64(ctx
, fp0
, fd
);
9509 tcg_temp_free_i64(fp0
);
9513 check_cp1_64bitmode(ctx
);
9515 TCGv_i64 fp0
= tcg_temp_new_i64();
9517 gen_load_fpr64(ctx
, fp0
, fs
);
9519 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
9521 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
9523 gen_store_fpr64(ctx
, fp0
, fd
);
9524 tcg_temp_free_i64(fp0
);
9528 check_cp1_registers(ctx
, fs
);
9530 TCGv_i32 fp32
= tcg_temp_new_i32();
9531 TCGv_i64 fp64
= tcg_temp_new_i64();
9533 gen_load_fpr64(ctx
, fp64
, fs
);
9535 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
9537 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
9539 tcg_temp_free_i64(fp64
);
9540 gen_store_fpr32(ctx
, fp32
, fd
);
9541 tcg_temp_free_i32(fp32
);
9545 check_cp1_registers(ctx
, fs
);
9547 TCGv_i32 fp32
= tcg_temp_new_i32();
9548 TCGv_i64 fp64
= tcg_temp_new_i64();
9550 gen_load_fpr64(ctx
, fp64
, fs
);
9552 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
9554 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
9556 tcg_temp_free_i64(fp64
);
9557 gen_store_fpr32(ctx
, fp32
, fd
);
9558 tcg_temp_free_i32(fp32
);
9562 check_cp1_registers(ctx
, fs
);
9564 TCGv_i32 fp32
= tcg_temp_new_i32();
9565 TCGv_i64 fp64
= tcg_temp_new_i64();
9567 gen_load_fpr64(ctx
, fp64
, fs
);
9569 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
9571 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
9573 tcg_temp_free_i64(fp64
);
9574 gen_store_fpr32(ctx
, fp32
, fd
);
9575 tcg_temp_free_i32(fp32
);
9579 check_cp1_registers(ctx
, fs
);
9581 TCGv_i32 fp32
= tcg_temp_new_i32();
9582 TCGv_i64 fp64
= tcg_temp_new_i64();
9584 gen_load_fpr64(ctx
, fp64
, fs
);
9586 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
9588 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
9590 tcg_temp_free_i64(fp64
);
9591 gen_store_fpr32(ctx
, fp32
, fd
);
9592 tcg_temp_free_i32(fp32
);
9596 check_insn(ctx
, ISA_MIPS32R6
);
9597 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9600 check_insn(ctx
, ISA_MIPS32R6
);
9601 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9604 check_insn(ctx
, ISA_MIPS32R6
);
9605 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9608 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9609 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9612 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9614 TCGLabel
*l1
= gen_new_label();
9618 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9620 fp0
= tcg_temp_new_i64();
9621 gen_load_fpr64(ctx
, fp0
, fs
);
9622 gen_store_fpr64(ctx
, fp0
, fd
);
9623 tcg_temp_free_i64(fp0
);
9628 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9630 TCGLabel
*l1
= gen_new_label();
9634 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9635 fp0
= tcg_temp_new_i64();
9636 gen_load_fpr64(ctx
, fp0
, fs
);
9637 gen_store_fpr64(ctx
, fp0
, fd
);
9638 tcg_temp_free_i64(fp0
);
9644 check_cp1_registers(ctx
, fs
| fd
);
9646 TCGv_i64 fp0
= tcg_temp_new_i64();
9648 gen_load_fpr64(ctx
, fp0
, fs
);
9649 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9650 gen_store_fpr64(ctx
, fp0
, fd
);
9651 tcg_temp_free_i64(fp0
);
9655 check_cp1_registers(ctx
, fs
| fd
);
9657 TCGv_i64 fp0
= tcg_temp_new_i64();
9659 gen_load_fpr64(ctx
, fp0
, fs
);
9660 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9661 gen_store_fpr64(ctx
, fp0
, fd
);
9662 tcg_temp_free_i64(fp0
);
9666 check_insn(ctx
, ISA_MIPS32R6
);
9668 TCGv_i64 fp0
= tcg_temp_new_i64();
9669 TCGv_i64 fp1
= tcg_temp_new_i64();
9670 TCGv_i64 fp2
= tcg_temp_new_i64();
9671 gen_load_fpr64(ctx
, fp0
, fs
);
9672 gen_load_fpr64(ctx
, fp1
, ft
);
9673 gen_load_fpr64(ctx
, fp2
, fd
);
9674 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9675 gen_store_fpr64(ctx
, fp2
, fd
);
9676 tcg_temp_free_i64(fp2
);
9677 tcg_temp_free_i64(fp1
);
9678 tcg_temp_free_i64(fp0
);
9682 check_insn(ctx
, ISA_MIPS32R6
);
9684 TCGv_i64 fp0
= tcg_temp_new_i64();
9685 TCGv_i64 fp1
= tcg_temp_new_i64();
9686 TCGv_i64 fp2
= tcg_temp_new_i64();
9687 gen_load_fpr64(ctx
, fp0
, fs
);
9688 gen_load_fpr64(ctx
, fp1
, ft
);
9689 gen_load_fpr64(ctx
, fp2
, fd
);
9690 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9691 gen_store_fpr64(ctx
, fp2
, fd
);
9692 tcg_temp_free_i64(fp2
);
9693 tcg_temp_free_i64(fp1
);
9694 tcg_temp_free_i64(fp0
);
9698 check_insn(ctx
, ISA_MIPS32R6
);
9700 TCGv_i64 fp0
= tcg_temp_new_i64();
9701 gen_load_fpr64(ctx
, fp0
, fs
);
9702 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9703 gen_store_fpr64(ctx
, fp0
, fd
);
9704 tcg_temp_free_i64(fp0
);
9708 check_insn(ctx
, ISA_MIPS32R6
);
9710 TCGv_i64 fp0
= tcg_temp_new_i64();
9711 gen_load_fpr64(ctx
, fp0
, fs
);
9712 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
9713 gen_store_fpr64(ctx
, fp0
, fd
);
9714 tcg_temp_free_i64(fp0
);
9717 case OPC_MIN_D
: /* OPC_RECIP2_D */
9718 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9720 TCGv_i64 fp0
= tcg_temp_new_i64();
9721 TCGv_i64 fp1
= tcg_temp_new_i64();
9722 gen_load_fpr64(ctx
, fp0
, fs
);
9723 gen_load_fpr64(ctx
, fp1
, ft
);
9724 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9725 gen_store_fpr64(ctx
, fp1
, fd
);
9726 tcg_temp_free_i64(fp1
);
9727 tcg_temp_free_i64(fp0
);
9730 check_cp1_64bitmode(ctx
);
9732 TCGv_i64 fp0
= tcg_temp_new_i64();
9733 TCGv_i64 fp1
= tcg_temp_new_i64();
9735 gen_load_fpr64(ctx
, fp0
, fs
);
9736 gen_load_fpr64(ctx
, fp1
, ft
);
9737 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9738 tcg_temp_free_i64(fp1
);
9739 gen_store_fpr64(ctx
, fp0
, fd
);
9740 tcg_temp_free_i64(fp0
);
9744 case OPC_MINA_D
: /* OPC_RECIP1_D */
9745 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9747 TCGv_i64 fp0
= tcg_temp_new_i64();
9748 TCGv_i64 fp1
= tcg_temp_new_i64();
9749 gen_load_fpr64(ctx
, fp0
, fs
);
9750 gen_load_fpr64(ctx
, fp1
, ft
);
9751 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9752 gen_store_fpr64(ctx
, fp1
, fd
);
9753 tcg_temp_free_i64(fp1
);
9754 tcg_temp_free_i64(fp0
);
9757 check_cp1_64bitmode(ctx
);
9759 TCGv_i64 fp0
= tcg_temp_new_i64();
9761 gen_load_fpr64(ctx
, fp0
, fs
);
9762 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9763 gen_store_fpr64(ctx
, fp0
, fd
);
9764 tcg_temp_free_i64(fp0
);
9768 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9769 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9771 TCGv_i64 fp0
= tcg_temp_new_i64();
9772 TCGv_i64 fp1
= tcg_temp_new_i64();
9773 gen_load_fpr64(ctx
, fp0
, fs
);
9774 gen_load_fpr64(ctx
, fp1
, ft
);
9775 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9776 gen_store_fpr64(ctx
, fp1
, fd
);
9777 tcg_temp_free_i64(fp1
);
9778 tcg_temp_free_i64(fp0
);
9781 check_cp1_64bitmode(ctx
);
9783 TCGv_i64 fp0
= tcg_temp_new_i64();
9785 gen_load_fpr64(ctx
, fp0
, fs
);
9786 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9787 gen_store_fpr64(ctx
, fp0
, fd
);
9788 tcg_temp_free_i64(fp0
);
9792 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9793 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9795 TCGv_i64 fp0
= tcg_temp_new_i64();
9796 TCGv_i64 fp1
= tcg_temp_new_i64();
9797 gen_load_fpr64(ctx
, fp0
, fs
);
9798 gen_load_fpr64(ctx
, fp1
, ft
);
9799 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9800 gen_store_fpr64(ctx
, fp1
, fd
);
9801 tcg_temp_free_i64(fp1
);
9802 tcg_temp_free_i64(fp0
);
9805 check_cp1_64bitmode(ctx
);
9807 TCGv_i64 fp0
= tcg_temp_new_i64();
9808 TCGv_i64 fp1
= tcg_temp_new_i64();
9810 gen_load_fpr64(ctx
, fp0
, fs
);
9811 gen_load_fpr64(ctx
, fp1
, ft
);
9812 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9813 tcg_temp_free_i64(fp1
);
9814 gen_store_fpr64(ctx
, fp0
, fd
);
9815 tcg_temp_free_i64(fp0
);
9828 case OPC_CMP_NGLE_D
:
9835 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9836 if (ctx
->opcode
& (1 << 6)) {
9837 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9839 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9843 check_cp1_registers(ctx
, fs
);
9845 TCGv_i32 fp32
= tcg_temp_new_i32();
9846 TCGv_i64 fp64
= tcg_temp_new_i64();
9848 gen_load_fpr64(ctx
, fp64
, fs
);
9849 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9850 tcg_temp_free_i64(fp64
);
9851 gen_store_fpr32(ctx
, fp32
, fd
);
9852 tcg_temp_free_i32(fp32
);
9856 check_cp1_registers(ctx
, fs
);
9858 TCGv_i32 fp32
= tcg_temp_new_i32();
9859 TCGv_i64 fp64
= tcg_temp_new_i64();
9861 gen_load_fpr64(ctx
, fp64
, fs
);
9863 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
9865 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
9867 tcg_temp_free_i64(fp64
);
9868 gen_store_fpr32(ctx
, fp32
, fd
);
9869 tcg_temp_free_i32(fp32
);
9873 check_cp1_64bitmode(ctx
);
9875 TCGv_i64 fp0
= tcg_temp_new_i64();
9877 gen_load_fpr64(ctx
, fp0
, fs
);
9879 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
9881 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
9883 gen_store_fpr64(ctx
, fp0
, fd
);
9884 tcg_temp_free_i64(fp0
);
9889 TCGv_i32 fp0
= tcg_temp_new_i32();
9891 gen_load_fpr32(ctx
, fp0
, fs
);
9892 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9893 gen_store_fpr32(ctx
, fp0
, fd
);
9894 tcg_temp_free_i32(fp0
);
9898 check_cp1_registers(ctx
, fd
);
9900 TCGv_i32 fp32
= tcg_temp_new_i32();
9901 TCGv_i64 fp64
= tcg_temp_new_i64();
9903 gen_load_fpr32(ctx
, fp32
, fs
);
9904 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9905 tcg_temp_free_i32(fp32
);
9906 gen_store_fpr64(ctx
, fp64
, fd
);
9907 tcg_temp_free_i64(fp64
);
9911 check_cp1_64bitmode(ctx
);
9913 TCGv_i32 fp32
= tcg_temp_new_i32();
9914 TCGv_i64 fp64
= tcg_temp_new_i64();
9916 gen_load_fpr64(ctx
, fp64
, fs
);
9917 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9918 tcg_temp_free_i64(fp64
);
9919 gen_store_fpr32(ctx
, fp32
, fd
);
9920 tcg_temp_free_i32(fp32
);
9924 check_cp1_64bitmode(ctx
);
9926 TCGv_i64 fp0
= tcg_temp_new_i64();
9928 gen_load_fpr64(ctx
, fp0
, fs
);
9929 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9930 gen_store_fpr64(ctx
, fp0
, fd
);
9931 tcg_temp_free_i64(fp0
);
9937 TCGv_i64 fp0
= tcg_temp_new_i64();
9939 gen_load_fpr64(ctx
, fp0
, fs
);
9940 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9941 gen_store_fpr64(ctx
, fp0
, fd
);
9942 tcg_temp_free_i64(fp0
);
9948 TCGv_i64 fp0
= tcg_temp_new_i64();
9949 TCGv_i64 fp1
= tcg_temp_new_i64();
9951 gen_load_fpr64(ctx
, fp0
, fs
);
9952 gen_load_fpr64(ctx
, fp1
, ft
);
9953 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9954 tcg_temp_free_i64(fp1
);
9955 gen_store_fpr64(ctx
, fp0
, fd
);
9956 tcg_temp_free_i64(fp0
);
9962 TCGv_i64 fp0
= tcg_temp_new_i64();
9963 TCGv_i64 fp1
= tcg_temp_new_i64();
9965 gen_load_fpr64(ctx
, fp0
, fs
);
9966 gen_load_fpr64(ctx
, fp1
, ft
);
9967 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9968 tcg_temp_free_i64(fp1
);
9969 gen_store_fpr64(ctx
, fp0
, fd
);
9970 tcg_temp_free_i64(fp0
);
9976 TCGv_i64 fp0
= tcg_temp_new_i64();
9977 TCGv_i64 fp1
= tcg_temp_new_i64();
9979 gen_load_fpr64(ctx
, fp0
, fs
);
9980 gen_load_fpr64(ctx
, fp1
, ft
);
9981 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9982 tcg_temp_free_i64(fp1
);
9983 gen_store_fpr64(ctx
, fp0
, fd
);
9984 tcg_temp_free_i64(fp0
);
9990 TCGv_i64 fp0
= tcg_temp_new_i64();
9992 gen_load_fpr64(ctx
, fp0
, fs
);
9993 gen_helper_float_abs_ps(fp0
, fp0
);
9994 gen_store_fpr64(ctx
, fp0
, fd
);
9995 tcg_temp_free_i64(fp0
);
10001 TCGv_i64 fp0
= tcg_temp_new_i64();
10003 gen_load_fpr64(ctx
, fp0
, fs
);
10004 gen_store_fpr64(ctx
, fp0
, fd
);
10005 tcg_temp_free_i64(fp0
);
10011 TCGv_i64 fp0
= tcg_temp_new_i64();
10013 gen_load_fpr64(ctx
, fp0
, fs
);
10014 gen_helper_float_chs_ps(fp0
, fp0
);
10015 gen_store_fpr64(ctx
, fp0
, fd
);
10016 tcg_temp_free_i64(fp0
);
10021 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10026 TCGLabel
*l1
= gen_new_label();
10030 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10031 fp0
= tcg_temp_new_i64();
10032 gen_load_fpr64(ctx
, fp0
, fs
);
10033 gen_store_fpr64(ctx
, fp0
, fd
);
10034 tcg_temp_free_i64(fp0
);
10041 TCGLabel
*l1
= gen_new_label();
10045 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10046 fp0
= tcg_temp_new_i64();
10047 gen_load_fpr64(ctx
, fp0
, fs
);
10048 gen_store_fpr64(ctx
, fp0
, fd
);
10049 tcg_temp_free_i64(fp0
);
10057 TCGv_i64 fp0
= tcg_temp_new_i64();
10058 TCGv_i64 fp1
= tcg_temp_new_i64();
10060 gen_load_fpr64(ctx
, fp0
, ft
);
10061 gen_load_fpr64(ctx
, fp1
, fs
);
10062 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
10063 tcg_temp_free_i64(fp1
);
10064 gen_store_fpr64(ctx
, fp0
, fd
);
10065 tcg_temp_free_i64(fp0
);
10071 TCGv_i64 fp0
= tcg_temp_new_i64();
10072 TCGv_i64 fp1
= tcg_temp_new_i64();
10074 gen_load_fpr64(ctx
, fp0
, ft
);
10075 gen_load_fpr64(ctx
, fp1
, fs
);
10076 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
10077 tcg_temp_free_i64(fp1
);
10078 gen_store_fpr64(ctx
, fp0
, fd
);
10079 tcg_temp_free_i64(fp0
);
10082 case OPC_RECIP2_PS
:
10085 TCGv_i64 fp0
= tcg_temp_new_i64();
10086 TCGv_i64 fp1
= tcg_temp_new_i64();
10088 gen_load_fpr64(ctx
, fp0
, fs
);
10089 gen_load_fpr64(ctx
, fp1
, ft
);
10090 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
10091 tcg_temp_free_i64(fp1
);
10092 gen_store_fpr64(ctx
, fp0
, fd
);
10093 tcg_temp_free_i64(fp0
);
10096 case OPC_RECIP1_PS
:
10099 TCGv_i64 fp0
= tcg_temp_new_i64();
10101 gen_load_fpr64(ctx
, fp0
, fs
);
10102 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10103 gen_store_fpr64(ctx
, fp0
, fd
);
10104 tcg_temp_free_i64(fp0
);
10107 case OPC_RSQRT1_PS
:
10110 TCGv_i64 fp0
= tcg_temp_new_i64();
10112 gen_load_fpr64(ctx
, fp0
, fs
);
10113 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10114 gen_store_fpr64(ctx
, fp0
, fd
);
10115 tcg_temp_free_i64(fp0
);
10118 case OPC_RSQRT2_PS
:
10121 TCGv_i64 fp0
= tcg_temp_new_i64();
10122 TCGv_i64 fp1
= tcg_temp_new_i64();
10124 gen_load_fpr64(ctx
, fp0
, fs
);
10125 gen_load_fpr64(ctx
, fp1
, ft
);
10126 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10127 tcg_temp_free_i64(fp1
);
10128 gen_store_fpr64(ctx
, fp0
, fd
);
10129 tcg_temp_free_i64(fp0
);
10133 check_cp1_64bitmode(ctx
);
10135 TCGv_i32 fp0
= tcg_temp_new_i32();
10137 gen_load_fpr32h(ctx
, fp0
, fs
);
10138 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10139 gen_store_fpr32(ctx
, fp0
, fd
);
10140 tcg_temp_free_i32(fp0
);
10143 case OPC_CVT_PW_PS
:
10146 TCGv_i64 fp0
= tcg_temp_new_i64();
10148 gen_load_fpr64(ctx
, fp0
, fs
);
10149 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10150 gen_store_fpr64(ctx
, fp0
, fd
);
10151 tcg_temp_free_i64(fp0
);
10155 check_cp1_64bitmode(ctx
);
10157 TCGv_i32 fp0
= tcg_temp_new_i32();
10159 gen_load_fpr32(ctx
, fp0
, fs
);
10160 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10161 gen_store_fpr32(ctx
, fp0
, fd
);
10162 tcg_temp_free_i32(fp0
);
10168 TCGv_i32 fp0
= tcg_temp_new_i32();
10169 TCGv_i32 fp1
= tcg_temp_new_i32();
10171 gen_load_fpr32(ctx
, fp0
, fs
);
10172 gen_load_fpr32(ctx
, fp1
, ft
);
10173 gen_store_fpr32h(ctx
, fp0
, fd
);
10174 gen_store_fpr32(ctx
, fp1
, fd
);
10175 tcg_temp_free_i32(fp0
);
10176 tcg_temp_free_i32(fp1
);
10182 TCGv_i32 fp0
= tcg_temp_new_i32();
10183 TCGv_i32 fp1
= tcg_temp_new_i32();
10185 gen_load_fpr32(ctx
, fp0
, fs
);
10186 gen_load_fpr32h(ctx
, fp1
, ft
);
10187 gen_store_fpr32(ctx
, fp1
, fd
);
10188 gen_store_fpr32h(ctx
, fp0
, fd
);
10189 tcg_temp_free_i32(fp0
);
10190 tcg_temp_free_i32(fp1
);
10196 TCGv_i32 fp0
= tcg_temp_new_i32();
10197 TCGv_i32 fp1
= tcg_temp_new_i32();
10199 gen_load_fpr32h(ctx
, fp0
, fs
);
10200 gen_load_fpr32(ctx
, fp1
, ft
);
10201 gen_store_fpr32(ctx
, fp1
, fd
);
10202 gen_store_fpr32h(ctx
, fp0
, fd
);
10203 tcg_temp_free_i32(fp0
);
10204 tcg_temp_free_i32(fp1
);
10210 TCGv_i32 fp0
= tcg_temp_new_i32();
10211 TCGv_i32 fp1
= tcg_temp_new_i32();
10213 gen_load_fpr32h(ctx
, fp0
, fs
);
10214 gen_load_fpr32h(ctx
, fp1
, ft
);
10215 gen_store_fpr32(ctx
, fp1
, fd
);
10216 gen_store_fpr32h(ctx
, fp0
, fd
);
10217 tcg_temp_free_i32(fp0
);
10218 tcg_temp_free_i32(fp1
);
10222 case OPC_CMP_UN_PS
:
10223 case OPC_CMP_EQ_PS
:
10224 case OPC_CMP_UEQ_PS
:
10225 case OPC_CMP_OLT_PS
:
10226 case OPC_CMP_ULT_PS
:
10227 case OPC_CMP_OLE_PS
:
10228 case OPC_CMP_ULE_PS
:
10229 case OPC_CMP_SF_PS
:
10230 case OPC_CMP_NGLE_PS
:
10231 case OPC_CMP_SEQ_PS
:
10232 case OPC_CMP_NGL_PS
:
10233 case OPC_CMP_LT_PS
:
10234 case OPC_CMP_NGE_PS
:
10235 case OPC_CMP_LE_PS
:
10236 case OPC_CMP_NGT_PS
:
10237 if (ctx
->opcode
& (1 << 6)) {
10238 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10240 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10244 MIPS_INVAL("farith");
10245 generate_exception_end(ctx
, EXCP_RI
);
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
{
    TCGv t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    }
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    switch (opc) {
    case OPC_LWXC1:
        check_cop1x(ctx);
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_LDXC1:
        check_cop1x(ctx);
        check_cp1_registers(ctx, fd);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        }
        break;
    case OPC_LUXC1:
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();

            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        }
        break;
    case OPC_SWXC1:
        check_cop1x(ctx);
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_SDXC1:
        check_cop1x(ctx);
        check_cp1_registers(ctx, fs);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
        }
        break;
    case OPC_SUXC1:
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
        }
        break;
    }
    tcg_temp_free(t0);
}
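/*
 * Annotation (added for clarity): the indexed loads/stores above compute
 * the effective address as GPR[base] + GPR[index], with the obvious
 * shortcuts when either register is $zero.  LUXC1/SUXC1 are the
 * "unaligned" doubleword variants: the address is forced onto an 8-byte
 * boundary with tcg_gen_andi_tl(t0, t0, ~0x7) before the 64-bit access,
 * so a misaligned pointer never faults.
 */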
static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
                            int fd, int fr, int fs, int ft)
{
    switch (opc) {
    case OPC_ALNV_PS:
        {
            TCGv t0 = tcg_temp_local_new();
            TCGv_i32 fp = tcg_temp_new_i32();
            TCGv_i32 fph = tcg_temp_new_i32();
            TCGLabel *l1 = gen_new_label();
            TCGLabel *l2 = gen_new_label();

            gen_load_gpr(t0, fr);
            tcg_gen_andi_tl(t0, t0, 0x7);

            tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
            gen_load_fpr32(ctx, fp, fs);
            gen_load_fpr32h(ctx, fph, fs);
            gen_store_fpr32(ctx, fp, fd);
            gen_store_fpr32h(ctx, fph, fd);
            tcg_gen_br(l2);
            gen_set_label(l1);
            tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
            tcg_temp_free(t0);
#ifdef TARGET_WORDS_BIGENDIAN
            gen_load_fpr32(ctx, fp, fs);
            gen_load_fpr32h(ctx, fph, ft);
            gen_store_fpr32h(ctx, fp, fd);
            gen_store_fpr32(ctx, fph, fd);
#else
            gen_load_fpr32h(ctx, fph, fs);
            gen_load_fpr32(ctx, fp, ft);
            gen_store_fpr32(ctx, fph, fd);
            gen_store_fpr32h(ctx, fp, fd);
#endif
            gen_set_label(l2);
            tcg_temp_free_i32(fp);
            tcg_temp_free_i32(fph);
        }
        break;
    case OPC_MADD_S:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_load_fpr32(ctx, fp2, fr);
            gen_helper_float_madd_s(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i32(fp0);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
        }
        break;
    case OPC_MADD_D:
        check_cp1_registers(ctx, fd | fs | ft | fr);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_madd_d(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_MADD_PS:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_madd_ps(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_MSUB_S:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_load_fpr32(ctx, fp2, fr);
            gen_helper_float_msub_s(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i32(fp0);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
        }
        break;
    case OPC_MSUB_D:
        check_cp1_registers(ctx, fd | fs | ft | fr);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_msub_d(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_MSUB_PS:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_msub_ps(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_NMADD_S:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_load_fpr32(ctx, fp2, fr);
            gen_helper_float_nmadd_s(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i32(fp0);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
        }
        break;
    case OPC_NMADD_D:
        check_cp1_registers(ctx, fd | fs | ft | fr);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_nmadd_d(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_NMADD_PS:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_nmadd_ps(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_NMSUB_S:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv_i32 fp1 = tcg_temp_new_i32();
            TCGv_i32 fp2 = tcg_temp_new_i32();

            gen_load_fpr32(ctx, fp0, fs);
            gen_load_fpr32(ctx, fp1, ft);
            gen_load_fpr32(ctx, fp2, fr);
            gen_helper_float_nmsub_s(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i32(fp0);
            tcg_temp_free_i32(fp1);
            gen_store_fpr32(ctx, fp2, fd);
            tcg_temp_free_i32(fp2);
        }
        break;
    case OPC_NMSUB_D:
        check_cp1_registers(ctx, fd | fs | ft | fr);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_nmsub_d(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    case OPC_NMSUB_PS:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            TCGv_i64 fp1 = tcg_temp_new_i64();
            TCGv_i64 fp2 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            gen_load_fpr64(ctx, fp1, ft);
            gen_load_fpr64(ctx, fp2, fr);
            gen_helper_float_nmsub_ps(fp2, cpu_env, fp0, fp1, fp2);
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            gen_store_fpr64(ctx, fp2, fd);
            tcg_temp_free_i64(fp2);
        }
        break;
    default:
        MIPS_INVAL("flt3_arith");
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
}
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
{
    TCGv t0;

#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode. */
    check_insn(ctx, ISA_MIPS32R2);
#endif
    t0 = tcg_temp_new();

    switch (rd) {
    case 0:
        gen_helper_rdhwr_cpunum(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 1:
        gen_helper_rdhwr_synci_step(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 2:
        gen_helper_rdhwr_cc(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 3:
        gen_helper_rdhwr_ccres(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 4:
        check_insn(ctx, ISA_MIPS32R6);
        if (sel != 0) {
            /* Performance counter registers are not implemented other than
             * control register 0.
             */
            generate_exception(ctx, EXCP_RI);
        }
        gen_helper_rdhwr_performance(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 5:
        check_insn(ctx, ISA_MIPS32R6);
        gen_helper_rdhwr_xnp(t0, cpu_env);
        gen_store_gpr(t0, rt);
        break;
    case 29:
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env,
                      offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
        gen_store_gpr(t0, rt);
        break;
#else
        if ((ctx->hflags & MIPS_HFLAG_CP0) ||
            (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
            tcg_gen_ld_tl(t0, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            gen_store_gpr(t0, rt);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free(t0);
}
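/* Note: the hardware register numbers selected above follow the usual RDHWR
   numbering: 0 = CPUNum, 1 = SYNCI_Step, 2 = CC (cycle counter), 3 = CCRes,
   4 and 5 are the Release 6 performance-counter and XNP registers, and 29 is
   the UserLocal (ULR) register. */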
static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
    } else {
        /* it is not safe to save ctx->hflags as hflags may be changed
           in execution time by the instruction in delay / forbidden slot. */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}
static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io. */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_FBNSLOT:
            gen_goto_tb(ctx, 0, ctx->pc + insn_bytes);
            break;
        case MIPS_HFLAG_B:
            /* unconditional branch */
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* branch likely taken case */
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            {
                TCGLabel *l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                /* Bit 0 of the target selects the ISA mode, so copy it into
                   the M16 hflag and clear it from the new PC. */
                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_temp_free(t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_raise_exception_debug(cpu_env);
            }
            tcg_gen_exit_tb(0);
            break;
        default:
            fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
            abort();
        }
    }
}
/* Compact Branches */
static void gen_compute_compact_branch(DisasContext *ctx, uint32_t opc,
                                       int rs, int rt, int32_t offset)
{
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int m16_lowbit = (ctx->hflags & MIPS_HFLAG_M16) != 0;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->pc);
#endif
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    /* Load needed operands and calculate btarget */
    switch (opc) {
    /* compact branch */
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        if (rs <= rt && rs == 0) {
            /* OPC_BEQZALC, OPC_BNEZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
        }
        break;
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
        if (rs == 0 || rs == rt) {
            /* OPC_BLEZALC, OPC_BGEZALC */
            /* OPC_BGTZALC, OPC_BLTZALC */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
        }
        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);
        bcond_compute = 1;
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BC:
    case OPC_BALC:
        ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        break;
    case OPC_BEQZC:
    case OPC_BNEZC:
        if (rs != 0) {
            /* OPC_BEQZC, OPC_BNEZC */
            gen_load_gpr(t0, rs);
            bcond_compute = 1;
            ctx->btarget = addr_add(ctx, ctx->pc + 4, offset);
        } else {
            /* OPC_JIC, OPC_JIALC */
            TCGv tbase = tcg_temp_new();
            TCGv toffset = tcg_temp_new();

            gen_load_gpr(tbase, rt);
            tcg_gen_movi_tl(toffset, offset);
            gen_op_addr_add(ctx, btarget, tbase, toffset);
            tcg_temp_free(tbase);
            tcg_temp_free(toffset);
        }
        break;
    default:
        MIPS_INVAL("Compact branch/jump");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    if (bcond_compute == 0) {
        /* Unconditional compact branch */
        switch (opc) {
        case OPC_JIALC:
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            /* Fallthrough */
        case OPC_JIC:
            ctx->hflags |= MIPS_HFLAG_BR;
            break;
        case OPC_BALC:
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 4 + m16_lowbit);
            /* Fallthrough */
        case OPC_BC:
            ctx->hflags |= MIPS_HFLAG_B;
            break;
        default:
            MIPS_INVAL("Compact branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }

        /* Generating branch here as compact branches don't have delay slot */
        gen_branch(ctx, 4);
    } else {
        /* Conditional compact branch */
        TCGLabel *fs = gen_new_label();
        save_cpu_state(ctx, 0);

        switch (opc) {
        case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
            if (rs == 0 && rt != 0) {
                /* OPC_BLEZALC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                /* OPC_BGEZALC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
            } else {
                /* OPC_BGEUC */
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU), t0, t1, fs);
            }
            break;
        case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
            if (rs == 0 && rt != 0) {
                /* OPC_BGTZALC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                /* OPC_BLTZALC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
            } else {
                /* OPC_BLTUC */
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU), t0, t1, fs);
            }
            break;
        case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
            if (rs == 0 && rt != 0) {
                /* OPC_BLEZC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                /* OPC_BGEZC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
            } else {
                /* OPC_BGEC */
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE), t0, t1, fs);
            }
            break;
        case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
            if (rs == 0 && rt != 0) {
                /* OPC_BGTZC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
            } else if (rs != 0 && rt != 0 && rs == rt) {
                /* OPC_BLTZC */
                tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
            } else {
                /* OPC_BLTC */
                tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT), t0, t1, fs);
            }
            break;
        case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
        case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
            if (rs >= rt) {
                /* OPC_BOVC, OPC_BNVC */
                TCGv t2 = tcg_temp_new();
                TCGv t3 = tcg_temp_new();
                TCGv t4 = tcg_temp_new();
                TCGv input_overflow = tcg_temp_new();

                gen_load_gpr(t0, rs);
                gen_load_gpr(t1, rt);
                tcg_gen_ext32s_tl(t2, t0);
                tcg_gen_setcond_tl(TCG_COND_NE, input_overflow, t2, t0);
                tcg_gen_ext32s_tl(t3, t1);
                tcg_gen_setcond_tl(TCG_COND_NE, t4, t3, t1);
                tcg_gen_or_tl(input_overflow, input_overflow, t4);

                tcg_gen_add_tl(t4, t2, t3);
                tcg_gen_ext32s_tl(t4, t4);
                tcg_gen_xor_tl(t2, t2, t3);
                tcg_gen_xor_tl(t3, t4, t3);
                tcg_gen_andc_tl(t2, t3, t2);
                tcg_gen_setcondi_tl(TCG_COND_LT, t4, t2, 0);
                tcg_gen_or_tl(t4, t4, input_overflow);
                if (opc == OPC_BOVC) {
                    /* OPC_BOVC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t4, 0, fs);
                } else {
                    /* OPC_BNVC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t4, 0, fs);
                }
                tcg_temp_free(input_overflow);
            } else if (rs < rt && rs == 0) {
                /* OPC_BEQZALC, OPC_BNEZALC */
                if (opc == OPC_BEQZALC) {
                    /* OPC_BEQZALC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t1, 0, fs);
                } else {
                    /* OPC_BNEZALC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t1, 0, fs);
                }
            } else {
                /* OPC_BEQC, OPC_BNEC */
                if (opc == OPC_BEQC) {
                    /* OPC_BEQC */
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ), t0, t1, fs);
                } else {
                    /* OPC_BNEC */
                    tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE), t0, t1, fs);
                }
            }
            break;
        case OPC_BEQZC:
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t0, 0, fs);
            break;
        case OPC_BNEZC:
            tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t0, 0, fs);
            break;
        default:
            MIPS_INVAL("Compact conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            goto out;
        }

        /* Generating branch here as compact branches don't have delay slot */
        gen_goto_tb(ctx, 1, ctx->btarget);
        gen_set_label(fs);

        ctx->hflags |= MIPS_HFLAG_FBNSLOT;
    }

out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/* ISA extensions (ASEs) */
/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_JAL = 0x03,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LBU = 0x14,
    M16_OPC_LHU = 0x15,
    M16_OPC_LWPC = 0x16,
    M16_OPC_LWU = 0x17,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_RRR = 0x1c,
    M16_OPC_EXTEND = 0x1e,

/* I8 funct field */

/* RR funct field */

/* I64 funct field */
    I64_DADDIUPC = 0x6,

/* RR ry field for CNVT */
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
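/* xlat() maps the 3-bit MIPS16 register encodings onto full MIPS32 register
   numbers: encodings 0..7 select s0, s1, v0, v1, a0, a1, a2 and a3
   (i.e. $16, $17, $2, $3, $4, $5, $6, $7). */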
static void gen_mips16_save (DisasContext *ctx,
                             int xsregs, int aregs,
                             int do_ra, int do_s0, int do_s1,
                             int framesize)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /* An unsupported aregs encoding raises a Reserved Instruction exception. */
    generate_exception_end(ctx, EXCP_RI);

    /* Store the argument registers a3..a0 (GPRs 7..4) at sp+12 .. sp+0. */
    gen_base_offset_addr(ctx, t0, 29, 12);
    gen_load_gpr(t1, 7);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_base_offset_addr(ctx, t0, 29, 8);
    gen_load_gpr(t1, 6);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_base_offset_addr(ctx, t0, 29, 4);
    gen_load_gpr(t1, 5);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_base_offset_addr(ctx, t0, 29, 0);
    gen_load_gpr(t1, 4);
    tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);

    gen_load_gpr(t0, 29);
#define DECR_AND_STORE(reg) do {                                 \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        gen_load_gpr(t1, reg);                                   \
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);       \
    } while (0)
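/* DECR_AND_STORE first moves t0 down by 4 bytes and then stores the named
   GPR there, so e.g. DECR_AND_STORE(31) pushes ra onto the frame being built
   below the incoming stack pointer. */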
    DECR_AND_STORE(31);
    DECR_AND_STORE(30);
    DECR_AND_STORE(23);
    DECR_AND_STORE(22);
    DECR_AND_STORE(21);
    DECR_AND_STORE(20);
    DECR_AND_STORE(19);
    DECR_AND_STORE(18);
    DECR_AND_STORE(17);
    DECR_AND_STORE(16);
    /* Invalid static-argument encodings raise a Reserved Instruction
       exception. */
    generate_exception_end(ctx, EXCP_RI);

#undef DECR_AND_STORE

    tcg_gen_movi_tl(t2, -framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
static void gen_mips16_restore (DisasContext *ctx,
                                int xsregs, int aregs,
                                int do_ra, int do_s0, int do_s1,
                                int framesize)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, t0, cpu_gpr[29], t2);

#define DECR_AND_LOAD(reg) do {                                  \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);       \
        gen_store_gpr(t1, reg);                                  \
    } while (0)

    /* Invalid static-argument encodings raise a Reserved Instruction
       exception. */
    generate_exception_end(ctx, EXCP_RI);

#undef DECR_AND_LOAD

    tcg_gen_movi_tl(t2, framesize);
    gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
{
    TCGv t0;

    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
    if (!is_64_bit) {
        tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
    }

    tcg_temp_free(t0);
}
static void gen_cache_operation(DisasContext *ctx, uint32_t op, int base,
                                int16_t offset)
{
    TCGv_i32 t0 = tcg_const_i32(op);
    TCGv t1 = tcg_temp_new();
    gen_base_offset_addr(ctx, t1, base, offset);
    gen_helper_cache(cpu_env, t1, t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t0);
}
#if defined(TARGET_MIPS64)
static void decode_i64_mips16 (DisasContext *ctx,
                               int ry, int funct, int16_t offset,
                               int extended)
{
    switch (funct) {
    case I64_LDSP:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 3;
        gen_ld(ctx, OPC_LD, ry, 29, offset);
        break;
    case I64_SDSP:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 3;
        gen_st(ctx, OPC_SD, ry, 29, offset);
        break;
    case I64_SDRASP:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : (ctx->opcode & 0xff) << 3;
        gen_st(ctx, OPC_SD, 31, 29, offset);
        break;
    case I64_DADJSP:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : ((int8_t)ctx->opcode) << 3;
        gen_arith_imm(ctx, OPC_DADDIU, 29, 29, offset);
        break;
    case I64_LDPC:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
            generate_exception_end(ctx, EXCP_RI);
        } else {
            offset = extended ? offset : offset << 3;
            gen_ld(ctx, OPC_LDPC, ry, 0, offset);
        }
        break;
    case I64_DADDIU5:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : ((int8_t)(offset << 3)) >> 3;
        gen_arith_imm(ctx, OPC_DADDIU, ry, ry, offset);
        break;
    case I64_DADDIUPC:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 2;
        gen_addiupc(ctx, ry, offset, 1, extended);
        break;
    case I64_DADDIUSP:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 2;
        gen_arith_imm(ctx, OPC_DADDIU, ry, 29, offset);
        break;
    }
}
#endif
static int decode_extended_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
{
    int extend = cpu_lduw_code(env, ctx->pc + 2);
    int op, rx, ry, funct, sa;
    int16_t imm, offset;

    ctx->opcode = (ctx->opcode << 16) | extend;
    op = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 22) & 0x1f;
    funct = (ctx->opcode >> 8) & 0x7;
    rx = xlat((ctx->opcode >> 8) & 0x7);
    ry = xlat((ctx->opcode >> 5) & 0x7);
    offset = imm = (int16_t) (((ctx->opcode >> 16) & 0x1f) << 11
                              | ((ctx->opcode >> 21) & 0x3f) << 5
                              | (ctx->opcode & 0x1f));
11462 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11465 case M16_OPC_ADDIUSP
:
11466 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11468 case M16_OPC_ADDIUPC
:
11469 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11472 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11473 /* No delay slot, so just process as a normal instruction */
11476 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11477 /* No delay slot, so just process as a normal instruction */
11479 case M16_OPC_BNEQZ
:
11480 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11481 /* No delay slot, so just process as a normal instruction */
11483 case M16_OPC_SHIFT
:
11484 switch (ctx
->opcode
& 0x3) {
11486 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11489 #if defined(TARGET_MIPS64)
11490 check_mips_64(ctx
);
11491 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11493 generate_exception_end(ctx
, EXCP_RI
);
11497 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11500 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11504 #if defined(TARGET_MIPS64)
11506 check_insn(ctx
, ISA_MIPS3
);
11507 check_mips_64(ctx
);
11508 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11512 imm
= ctx
->opcode
& 0xf;
11513 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11514 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11515 imm
= (int16_t) (imm
<< 1) >> 1;
11516 if ((ctx
->opcode
>> 4) & 0x1) {
11517 #if defined(TARGET_MIPS64)
11518 check_mips_64(ctx
);
11519 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11521 generate_exception_end(ctx
, EXCP_RI
);
11524 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11527 case M16_OPC_ADDIU8
:
11528 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11531 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11533 case M16_OPC_SLTIU
:
11534 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11539 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11542 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11545 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11548 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11551 check_insn(ctx
, ISA_MIPS32
);
11553 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11554 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11555 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11556 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11557 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11558 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11559 | (ctx
->opcode
& 0xf)) << 3;
11561 if (ctx
->opcode
& (1 << 7)) {
11562 gen_mips16_save(ctx
, xsregs
, aregs
,
11563 do_ra
, do_s0
, do_s1
,
11566 gen_mips16_restore(ctx
, xsregs
, aregs
,
11567 do_ra
, do_s0
, do_s1
,
11573 generate_exception_end(ctx
, EXCP_RI
);
11578 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11581 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11583 #if defined(TARGET_MIPS64)
11585 check_insn(ctx
, ISA_MIPS3
);
11586 check_mips_64(ctx
);
11587 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11591 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11594 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11597 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11600 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11603 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11606 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11609 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11611 #if defined(TARGET_MIPS64)
11613 check_insn(ctx
, ISA_MIPS3
);
11614 check_mips_64(ctx
);
11615 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11619 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11622 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11625 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11628 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11630 #if defined(TARGET_MIPS64)
11632 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11636 generate_exception_end(ctx
, EXCP_RI
);
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
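/* SDBBP with code 1 is the UHI (Unified Hosting Interface) semihosting
   request; only that code is redirected to the semihosting helper, while
   every other SDBBP code keeps its normal debug-breakpoint behaviour. */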
11652 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11656 int op
, cnvt_op
, op1
, offset
;
11660 op
= (ctx
->opcode
>> 11) & 0x1f;
11661 sa
= (ctx
->opcode
>> 2) & 0x7;
11662 sa
= sa
== 0 ? 8 : sa
;
11663 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11664 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11665 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11666 op1
= offset
= ctx
->opcode
& 0x1f;
11671 case M16_OPC_ADDIUSP
:
11673 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11675 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11678 case M16_OPC_ADDIUPC
:
11679 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11682 offset
= (ctx
->opcode
& 0x7ff) << 1;
11683 offset
= (int16_t)(offset
<< 4) >> 4;
11684 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11685 /* No delay slot, so just process as a normal instruction */
11688 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11689 offset
= (((ctx
->opcode
& 0x1f) << 21)
11690 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11692 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11693 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11697 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11698 ((int8_t)ctx
->opcode
) << 1, 0);
11699 /* No delay slot, so just process as a normal instruction */
11701 case M16_OPC_BNEQZ
:
11702 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11703 ((int8_t)ctx
->opcode
) << 1, 0);
11704 /* No delay slot, so just process as a normal instruction */
11706 case M16_OPC_SHIFT
:
11707 switch (ctx
->opcode
& 0x3) {
11709 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11712 #if defined(TARGET_MIPS64)
11713 check_insn(ctx
, ISA_MIPS3
);
11714 check_mips_64(ctx
);
11715 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11717 generate_exception_end(ctx
, EXCP_RI
);
11721 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11724 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11728 #if defined(TARGET_MIPS64)
11730 check_insn(ctx
, ISA_MIPS3
);
11731 check_mips_64(ctx
);
11732 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11737 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11739 if ((ctx
->opcode
>> 4) & 1) {
11740 #if defined(TARGET_MIPS64)
11741 check_insn(ctx
, ISA_MIPS3
);
11742 check_mips_64(ctx
);
11743 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11745 generate_exception_end(ctx
, EXCP_RI
);
11748 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11752 case M16_OPC_ADDIU8
:
11754 int16_t imm
= (int8_t) ctx
->opcode
;
11756 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11761 int16_t imm
= (uint8_t) ctx
->opcode
;
11762 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11765 case M16_OPC_SLTIU
:
11767 int16_t imm
= (uint8_t) ctx
->opcode
;
11768 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11775 funct
= (ctx
->opcode
>> 8) & 0x7;
11778 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11779 ((int8_t)ctx
->opcode
) << 1, 0);
11782 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11783 ((int8_t)ctx
->opcode
) << 1, 0);
11786 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11789 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11790 ((int8_t)ctx
->opcode
) << 3);
11793 check_insn(ctx
, ISA_MIPS32
);
11795 int do_ra
= ctx
->opcode
& (1 << 6);
11796 int do_s0
= ctx
->opcode
& (1 << 5);
11797 int do_s1
= ctx
->opcode
& (1 << 4);
11798 int framesize
= ctx
->opcode
& 0xf;
11800 if (framesize
== 0) {
11803 framesize
= framesize
<< 3;
11806 if (ctx
->opcode
& (1 << 7)) {
11807 gen_mips16_save(ctx
, 0, 0,
11808 do_ra
, do_s0
, do_s1
, framesize
);
11810 gen_mips16_restore(ctx
, 0, 0,
11811 do_ra
, do_s0
, do_s1
, framesize
);
11817 int rz
= xlat(ctx
->opcode
& 0x7);
11819 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11820 ((ctx
->opcode
>> 5) & 0x7);
11821 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11825 reg32
= ctx
->opcode
& 0x1f;
11826 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11829 generate_exception_end(ctx
, EXCP_RI
);
11836 int16_t imm
= (uint8_t) ctx
->opcode
;
11838 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11843 int16_t imm
= (uint8_t) ctx
->opcode
;
11844 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11847 #if defined(TARGET_MIPS64)
11849 check_insn(ctx
, ISA_MIPS3
);
11850 check_mips_64(ctx
);
11851 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11855 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11858 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11861 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11864 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11867 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11870 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11873 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11875 #if defined (TARGET_MIPS64)
11877 check_insn(ctx
, ISA_MIPS3
);
11878 check_mips_64(ctx
);
11879 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11883 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11886 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11889 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11892 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11896 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11899 switch (ctx
->opcode
& 0x3) {
11901 mips32_op
= OPC_ADDU
;
11904 mips32_op
= OPC_SUBU
;
11906 #if defined(TARGET_MIPS64)
11908 mips32_op
= OPC_DADDU
;
11909 check_insn(ctx
, ISA_MIPS3
);
11910 check_mips_64(ctx
);
11913 mips32_op
= OPC_DSUBU
;
11914 check_insn(ctx
, ISA_MIPS3
);
11915 check_mips_64(ctx
);
11919 generate_exception_end(ctx
, EXCP_RI
);
11923 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11932 int nd
= (ctx
->opcode
>> 7) & 0x1;
11933 int link
= (ctx
->opcode
>> 6) & 0x1;
11934 int ra
= (ctx
->opcode
>> 5) & 0x1;
11937 check_insn(ctx
, ISA_MIPS32
);
11946 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11951 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11952 gen_helper_do_semihosting(cpu_env
);
11954 /* XXX: not clear which exception should be raised
11955 * when in debug mode...
11957 check_insn(ctx
, ISA_MIPS32
);
11958 generate_exception_end(ctx
, EXCP_DBp
);
11962 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11965 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11968 generate_exception_end(ctx
, EXCP_BREAK
);
11971 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11974 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11977 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11979 #if defined (TARGET_MIPS64)
11981 check_insn(ctx
, ISA_MIPS3
);
11982 check_mips_64(ctx
);
11983 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11987 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11990 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11993 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11996 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11999 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
12002 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
12005 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
12008 check_insn(ctx
, ISA_MIPS32
);
12010 case RR_RY_CNVT_ZEB
:
12011 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12013 case RR_RY_CNVT_ZEH
:
12014 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12016 case RR_RY_CNVT_SEB
:
12017 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12019 case RR_RY_CNVT_SEH
:
12020 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12022 #if defined (TARGET_MIPS64)
12023 case RR_RY_CNVT_ZEW
:
12024 check_insn(ctx
, ISA_MIPS64
);
12025 check_mips_64(ctx
);
12026 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12028 case RR_RY_CNVT_SEW
:
12029 check_insn(ctx
, ISA_MIPS64
);
12030 check_mips_64(ctx
);
12031 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12035 generate_exception_end(ctx
, EXCP_RI
);
12040 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
12042 #if defined (TARGET_MIPS64)
12044 check_insn(ctx
, ISA_MIPS3
);
12045 check_mips_64(ctx
);
12046 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
12049 check_insn(ctx
, ISA_MIPS3
);
12050 check_mips_64(ctx
);
12051 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
12054 check_insn(ctx
, ISA_MIPS3
);
12055 check_mips_64(ctx
);
12056 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
12059 check_insn(ctx
, ISA_MIPS3
);
12060 check_mips_64(ctx
);
12061 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
12065 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
12068 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
12071 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
12074 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
12076 #if defined (TARGET_MIPS64)
12078 check_insn(ctx
, ISA_MIPS3
);
12079 check_mips_64(ctx
);
12080 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
12083 check_insn(ctx
, ISA_MIPS3
);
12084 check_mips_64(ctx
);
12085 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
12088 check_insn(ctx
, ISA_MIPS3
);
12089 check_mips_64(ctx
);
12090 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
12093 check_insn(ctx
, ISA_MIPS3
);
12094 check_mips_64(ctx
);
12095 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12099 generate_exception_end(ctx
, EXCP_RI
);
12103 case M16_OPC_EXTEND
:
12104 decode_extended_mips16_opc(env
, ctx
);
12107 #if defined(TARGET_MIPS64)
12109 funct
= (ctx
->opcode
>> 8) & 0x7;
12110 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12114 generate_exception_end(ctx
, EXCP_RI
);
12121 /* microMIPS extension to MIPS32/MIPS64 */
12124 * microMIPS32/microMIPS64 major opcodes
12126 * 1. MIPS Architecture for Programmers Volume II-B:
12127 * The microMIPS32 Instruction Set (Revision 3.05)
12129 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12131 * 2. MIPS Architecture For Programmers Volume II-A:
12132 * The MIPS64 Instruction Set (Revision 3.51)
12162 POOL32S
= 0x16, /* MIPS64 */
12163 DADDIU32
= 0x17, /* MIPS64 */
12192 /* 0x29 is reserved */
12205 /* 0x31 is reserved */
12218 SD32
= 0x36, /* MIPS64 */
12219 LD32
= 0x37, /* MIPS64 */
12221 /* 0x39 is reserved */
12237 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12247 /* POOL32A encoding of minor opcode field */
12250 /* These opcodes are distinguished only by bits 9..6; those bits are
12251 * what are recorded below. */
12288 /* The following can be distinguished by their lower 6 bits. */
12298 /* POOL32AXF encoding of minor opcode field extension */
12301 * 1. MIPS Architecture for Programmers Volume II-B:
12302 * The microMIPS32 Instruction Set (Revision 3.05)
12304 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12306 * 2. MIPS Architecture for Programmers VolumeIV-e:
12307 * The MIPS DSP Application-Specific Extension
12308 * to the microMIPS32 Architecture (Revision 2.34)
12310 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12325 /* begin of microMIPS32 DSP */
12327 /* bits 13..12 for 0x01 */
12333 /* bits 13..12 for 0x2a */
12339 /* bits 13..12 for 0x32 */
12343 /* end of microMIPS32 DSP */
12345 /* bits 15..12 for 0x2c */
12362 /* bits 15..12 for 0x34 */
12370 /* bits 15..12 for 0x3c */
12372 JR
= 0x0, /* alias */
12380 /* bits 15..12 for 0x05 */
12384 /* bits 15..12 for 0x0d */
12396 /* bits 15..12 for 0x15 */
12402 /* bits 15..12 for 0x1d */
12406 /* bits 15..12 for 0x2d */
12411 /* bits 15..12 for 0x35 */
12418 /* POOL32B encoding of minor opcode field (bits 15..12) */
12434 /* POOL32C encoding of minor opcode field (bits 15..12) */
12442 /* 0xa is reserved */
12449 /* 0x6 is reserved */
12455 /* POOL32F encoding of minor opcode field (bits 5..0) */
12458 /* These are the bit 7..6 values */
12467 /* These are the bit 8..6 values */
12492 MOVZ_FMT_05
= 0x05,
12526 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12533 /* POOL32Fxf encoding of minor opcode extension field */
12571 /* POOL32I encoding of minor opcode field (bits 25..21) */
12601 /* These overlap and are distinguished by bit16 of the instruction */
12610 /* POOL16A encoding of minor opcode field */
12617 /* POOL16B encoding of minor opcode field */
12624 /* POOL16C encoding of minor opcode field */
12644 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12664 /* POOL16D encoding of minor opcode field */
12671 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions. */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width))) << (32-width))   \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
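/* For example, SIMM(op, 1, 4) extracts bits 4..1 of op and sign-extends them
   to the range -8..7, while ZIMM(op, 1, 4) returns the same four bits
   zero-extended to 0..15. */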
static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}

static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}
static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}
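/* The 9-bit ADDIUSP immediate is folded: encodings 0 and 1 decode to 256 and
   257 (an adjustment of +1024 or +1028 bytes once scaled by 4), 2..255 are
   used as-is, and larger encodings select negative values, e.g. 509 decodes
   to -3 for an adjustment of -12 bytes. */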
static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}

static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
{
    TCGv t0, t1;
    TCGv_i32 t2;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
    switch (opc) {
    case LWM32:
        gen_helper_lwm(cpu_env, t0, t1, t2);
        break;
    case SWM32:
        gen_helper_swm(cpu_env, t0, t1, t2);
        break;
#ifdef TARGET_MIPS64
    case LDM:
        gen_helper_ldm(cpu_env, t0, t1, t2);
        break;
    case SDM:
        gen_helper_sdm(cpu_env, t0, t1, t2);
        break;
#endif
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t2);
}
12802 static void gen_pool16c_insn(DisasContext
*ctx
)
12804 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12805 int rs
= mmreg(ctx
->opcode
& 0x7);
12807 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12812 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12818 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12824 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12830 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12837 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12838 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12840 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12849 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12850 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12852 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12859 int reg
= ctx
->opcode
& 0x1f;
12861 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12867 int reg
= ctx
->opcode
& 0x1f;
12868 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12869 /* Let normal delay slot handling in our caller take us
12870 to the branch target. */
12875 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12876 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12880 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12881 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12885 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12889 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12892 generate_exception_end(ctx
, EXCP_BREAK
);
12895 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12896 gen_helper_do_semihosting(cpu_env
);
12898 /* XXX: not clear which exception should be raised
12899 * when in debug mode...
12901 check_insn(ctx
, ISA_MIPS32
);
12902 generate_exception_end(ctx
, EXCP_DBp
);
12905 case JRADDIUSP
+ 0:
12906 case JRADDIUSP
+ 1:
12908 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12909 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12910 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12911 /* Let normal delay slot handling in our caller take us
12912 to the branch target. */
12916 generate_exception_end(ctx
, EXCP_RI
);
12921 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
12924 int rd
, rs
, re
, rt
;
12925 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
12926 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
12927 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
12928 rd
= rd_enc
[enc_dest
];
12929 re
= re_enc
[enc_dest
];
12930 rs
= rs_rt_enc
[enc_rs
];
12931 rt
= rs_rt_enc
[enc_rt
];
12933 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
12935 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
12938 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
12940 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
12944 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12946 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12947 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12949 switch (ctx
->opcode
& 0xf) {
12951 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12954 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12958 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12959 int offset
= extract32(ctx
->opcode
, 4, 4);
12960 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12963 case R6_JRC16
: /* JRCADDIUSP */
12964 if ((ctx
->opcode
>> 4) & 1) {
12966 int imm
= extract32(ctx
->opcode
, 5, 5);
12967 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12968 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12971 int rs
= extract32(ctx
->opcode
, 5, 5);
12972 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12975 case MOVEP
... MOVEP_07
:
12976 case MOVEP_0C
... MOVEP_0F
:
12978 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12979 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12980 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12981 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12985 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12988 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12992 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12993 int offset
= extract32(ctx
->opcode
, 4, 4);
12994 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12997 case JALRC16
: /* BREAK16, SDBBP16 */
12998 switch (ctx
->opcode
& 0x3f) {
13000 case JALRC16
+ 0x20:
13002 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
13007 generate_exception(ctx
, EXCP_BREAK
);
13011 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
13012 gen_helper_do_semihosting(cpu_env
);
13014 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13015 generate_exception(ctx
, EXCP_RI
);
13017 generate_exception(ctx
, EXCP_DBp
);
13024 generate_exception(ctx
, EXCP_RI
);
13029 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
13031 TCGv t0
= tcg_temp_new();
13032 TCGv t1
= tcg_temp_new();
13034 gen_load_gpr(t0
, base
);
13037 gen_load_gpr(t1
, index
);
13038 tcg_gen_shli_tl(t1
, t1
, 2);
13039 gen_op_addr_add(ctx
, t0
, t1
, t0
);
13042 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13043 gen_store_gpr(t1
, rd
);
13049 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
13050 int base
, int16_t offset
)
13054 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
13055 generate_exception_end(ctx
, EXCP_RI
);
13059 t0
= tcg_temp_new();
13060 t1
= tcg_temp_new();
13062 gen_base_offset_addr(ctx
, t0
, base
, offset
);
13067 generate_exception_end(ctx
, EXCP_RI
);
13070 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13071 gen_store_gpr(t1
, rd
);
13072 tcg_gen_movi_tl(t1
, 4);
13073 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13074 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13075 gen_store_gpr(t1
, rd
+1);
13078 gen_load_gpr(t1
, rd
);
13079 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13080 tcg_gen_movi_tl(t1
, 4);
13081 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13082 gen_load_gpr(t1
, rd
+1);
13083 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13085 #ifdef TARGET_MIPS64
13088 generate_exception_end(ctx
, EXCP_RI
);
13091 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13092 gen_store_gpr(t1
, rd
);
13093 tcg_gen_movi_tl(t1
, 8);
13094 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13095 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13096 gen_store_gpr(t1
, rd
+1);
13099 gen_load_gpr(t1
, rd
);
13100 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13101 tcg_gen_movi_tl(t1
, 8);
13102 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13103 gen_load_gpr(t1
, rd
+1);
13104 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13112 static void gen_sync(int stype
)
13114 TCGBar tcg_mo
= TCG_BAR_SC
;
13117 case 0x4: /* SYNC_WMB */
13118 tcg_mo
|= TCG_MO_ST_ST
;
13120 case 0x10: /* SYNC_MB */
13121 tcg_mo
|= TCG_MO_ALL
;
13123 case 0x11: /* SYNC_ACQUIRE */
13124 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
13126 case 0x12: /* SYNC_RELEASE */
13127 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
13129 case 0x13: /* SYNC_RMB */
13130 tcg_mo
|= TCG_MO_LD_LD
;
13133 tcg_mo
|= TCG_MO_ALL
;
13137 tcg_gen_mb(tcg_mo
);
13140 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13142 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13143 int minor
= (ctx
->opcode
>> 12) & 0xf;
13144 uint32_t mips32_op
;
13146 switch (extension
) {
13148 mips32_op
= OPC_TEQ
;
13151 mips32_op
= OPC_TGE
;
13154 mips32_op
= OPC_TGEU
;
13157 mips32_op
= OPC_TLT
;
13160 mips32_op
= OPC_TLTU
;
13163 mips32_op
= OPC_TNE
;
13165 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13167 #ifndef CONFIG_USER_ONLY
13170 check_cp0_enabled(ctx
);
13172 /* Treat as NOP. */
13175 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13179 check_cp0_enabled(ctx
);
13181 TCGv t0
= tcg_temp_new();
13183 gen_load_gpr(t0
, rt
);
13184 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13190 switch (minor
& 3) {
13192 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13195 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13198 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13201 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13204 goto pool32axf_invalid
;
13208 switch (minor
& 3) {
13210 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13213 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13216 goto pool32axf_invalid
;
13222 check_insn(ctx
, ISA_MIPS32R6
);
13223 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13226 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13229 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13232 mips32_op
= OPC_CLO
;
13235 mips32_op
= OPC_CLZ
;
13237 check_insn(ctx
, ISA_MIPS32
);
13238 gen_cl(ctx
, mips32_op
, rt
, rs
);
13241 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13242 gen_rdhwr(ctx
, rt
, rs
, 0);
13245 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13248 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13249 mips32_op
= OPC_MULT
;
13252 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13253 mips32_op
= OPC_MULTU
;
13256 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13257 mips32_op
= OPC_DIV
;
13260 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13261 mips32_op
= OPC_DIVU
;
13264 check_insn(ctx
, ISA_MIPS32
);
13265 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13268 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13269 mips32_op
= OPC_MADD
;
13272 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13273 mips32_op
= OPC_MADDU
;
13276 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13277 mips32_op
= OPC_MSUB
;
13280 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13281 mips32_op
= OPC_MSUBU
;
13283 check_insn(ctx
, ISA_MIPS32
);
13284 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13287 goto pool32axf_invalid
;
13298 generate_exception_err(ctx
, EXCP_CpU
, 2);
13301 goto pool32axf_invalid
;
13306 case JALR
: /* JALRC */
13307 case JALR_HB
: /* JALRC_HB */
13308 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13309 /* JALRC, JALRC_HB */
13310 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13312 /* JALR, JALR_HB */
13313 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13314 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13319 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13320 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13321 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13324 goto pool32axf_invalid
;
13330 check_cp0_enabled(ctx
);
13331 check_insn(ctx
, ISA_MIPS32R2
);
13332 gen_load_srsgpr(rs
, rt
);
13335 check_cp0_enabled(ctx
);
13336 check_insn(ctx
, ISA_MIPS32R2
);
13337 gen_store_srsgpr(rs
, rt
);
13340 goto pool32axf_invalid
;
13343 #ifndef CONFIG_USER_ONLY
13347 mips32_op
= OPC_TLBP
;
13350 mips32_op
= OPC_TLBR
;
13353 mips32_op
= OPC_TLBWI
;
13356 mips32_op
= OPC_TLBWR
;
13359 mips32_op
= OPC_TLBINV
;
13362 mips32_op
= OPC_TLBINVF
;
13365 mips32_op
= OPC_WAIT
;
13368 mips32_op
= OPC_DERET
;
13371 mips32_op
= OPC_ERET
;
13373 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13376 goto pool32axf_invalid
;
13382 check_cp0_enabled(ctx
);
13384 TCGv t0
= tcg_temp_new();
13386 save_cpu_state(ctx
, 1);
13387 gen_helper_di(t0
, cpu_env
);
13388 gen_store_gpr(t0
, rs
);
13389 /* Stop translation as we may have switched the execution mode */
13390 ctx
->bstate
= BS_STOP
;
13395 check_cp0_enabled(ctx
);
13397 TCGv t0
= tcg_temp_new();
13399 save_cpu_state(ctx
, 1);
13400 gen_helper_ei(t0
, cpu_env
);
13401 gen_store_gpr(t0
, rs
);
13402 /* Stop translation as we may have switched the execution mode */
13403 ctx
->bstate
= BS_STOP
;
13408 goto pool32axf_invalid
;
13415 gen_sync(extract32(ctx
->opcode
, 16, 5));
13418 generate_exception_end(ctx
, EXCP_SYSCALL
);
13421 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13422 gen_helper_do_semihosting(cpu_env
);
13424 check_insn(ctx
, ISA_MIPS32
);
13425 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13426 generate_exception_end(ctx
, EXCP_RI
);
13428 generate_exception_end(ctx
, EXCP_DBp
);
13433 goto pool32axf_invalid
;
13437 switch (minor
& 3) {
13439 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13442 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13445 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13448 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13451 goto pool32axf_invalid
;
13455 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13458 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13461 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13464 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13467 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13470 goto pool32axf_invalid
;
13475 MIPS_INVAL("pool32axf");
13476 generate_exception_end(ctx
, EXCP_RI
);
13481 /* Values for microMIPS fmt field. Variable-width, depending on which
13482 formats the instruction supports. */
13501 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13503 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13504 uint32_t mips32_op
;
13506 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13507 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13508 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13510 switch (extension
) {
13511 case FLOAT_1BIT_FMT(CFC1
, 0):
13512 mips32_op
= OPC_CFC1
;
13514 case FLOAT_1BIT_FMT(CTC1
, 0):
13515 mips32_op
= OPC_CTC1
;
13517 case FLOAT_1BIT_FMT(MFC1
, 0):
13518 mips32_op
= OPC_MFC1
;
13520 case FLOAT_1BIT_FMT(MTC1
, 0):
        mips32_op = OPC_MTC1;
    case FLOAT_1BIT_FMT(MFHC1, 0):
        mips32_op = OPC_MFHC1;
    case FLOAT_1BIT_FMT(MTHC1, 0):
        mips32_op = OPC_MTHC1;
        gen_cp1(ctx, mips32_op, rt, rs);
    /* Reciprocal square root */
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_S):
        mips32_op = OPC_RSQRT_S;
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D):
        mips32_op = OPC_RSQRT_D;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_S):
        mips32_op = OPC_SQRT_S;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_D):
        mips32_op = OPC_SQRT_D;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_S):
        mips32_op = OPC_RECIP_S;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_D):
        mips32_op = OPC_RECIP_D;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_S):
        mips32_op = OPC_FLOOR_L_S;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_D):
        mips32_op = OPC_FLOOR_L_D;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_S):
        mips32_op = OPC_FLOOR_W_S;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_D):
        mips32_op = OPC_FLOOR_W_D;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_S):
        mips32_op = OPC_CEIL_L_S;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_D):
        mips32_op = OPC_CEIL_L_D;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_S):
        mips32_op = OPC_CEIL_W_S;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_D):
        mips32_op = OPC_CEIL_W_D;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_S):
        mips32_op = OPC_TRUNC_L_S;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_D):
        mips32_op = OPC_TRUNC_L_D;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_S):
        mips32_op = OPC_TRUNC_W_S;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_D):
        mips32_op = OPC_TRUNC_W_D;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_S):
        mips32_op = OPC_ROUND_L_S;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_D):
        mips32_op = OPC_ROUND_L_D;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_S):
        mips32_op = OPC_ROUND_W_S;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_D):
        mips32_op = OPC_ROUND_W_D;
    /* Integer to floating-point conversion */
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_S):
        mips32_op = OPC_CVT_L_S;
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_D):
        mips32_op = OPC_CVT_L_D;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_S):
        mips32_op = OPC_CVT_W_S;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_D):
        mips32_op = OPC_CVT_W_D;
    /* Paired-foo conversions */
    case FLOAT_1BIT_FMT(CVT_S_PL, 0):
        mips32_op = OPC_CVT_S_PL;
    case FLOAT_1BIT_FMT(CVT_S_PU, 0):
        mips32_op = OPC_CVT_S_PU;
    case FLOAT_1BIT_FMT(CVT_PW_PS, 0):
        mips32_op = OPC_CVT_PW_PS;
    case FLOAT_1BIT_FMT(CVT_PS_PW, 0):
        mips32_op = OPC_CVT_PS_PW;
    /* Floating-point moves */
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_S):
        mips32_op = OPC_MOV_S;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_D):
        mips32_op = OPC_MOV_D;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_PS):
        mips32_op = OPC_MOV_PS;
    /* Absolute value */
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_S):
        mips32_op = OPC_ABS_S;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_D):
        mips32_op = OPC_ABS_D;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_PS):
        mips32_op = OPC_ABS_PS;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_S):
        mips32_op = OPC_NEG_S;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_D):
        mips32_op = OPC_NEG_D;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_PS):
        mips32_op = OPC_NEG_PS;
    /* Reciprocal square root step */
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RSQRT1_S;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RSQRT1_D;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RSQRT1_PS;
    /* Reciprocal step */
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RECIP1_S;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RECIP1_D;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RECIP1_PS;
    /* Conversions from double */
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_S):
        mips32_op = OPC_CVT_D_S;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_W):
        mips32_op = OPC_CVT_D_W;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_L):
        mips32_op = OPC_CVT_D_L;
    /* Conversions from single */
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_D):
        mips32_op = OPC_CVT_S_D;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_W):
        mips32_op = OPC_CVT_S_W;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_L):
        mips32_op = OPC_CVT_S_L;
        gen_farith(ctx, mips32_op, -1, rs, rt, 0);
    /* Conditional moves on floating-point codes */
    case COND_FLOAT_MOV(MOVT, 0):
    case COND_FLOAT_MOV(MOVT, 1):
    case COND_FLOAT_MOV(MOVT, 2):
    case COND_FLOAT_MOV(MOVT, 3):
    case COND_FLOAT_MOV(MOVT, 4):
    case COND_FLOAT_MOV(MOVT, 5):
    case COND_FLOAT_MOV(MOVT, 6):
    case COND_FLOAT_MOV(MOVT, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
    case COND_FLOAT_MOV(MOVF, 0):
    case COND_FLOAT_MOV(MOVF, 1):
    case COND_FLOAT_MOV(MOVF, 2):
    case COND_FLOAT_MOV(MOVF, 3):
    case COND_FLOAT_MOV(MOVF, 4):
    case COND_FLOAT_MOV(MOVF, 5):
    case COND_FLOAT_MOV(MOVF, 6):
    case COND_FLOAT_MOV(MOVF, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);
        MIPS_INVAL("pool32fxf");
        generate_exception_end(ctx, EXCP_RI);
static void decode_micromips32_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int rt, rs, rd, rr;
    uint32_t op, minor, mips32_op;
    uint32_t cond, fmt, cc;

    insn = cpu_lduw_code(env, ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;
        minor = ctx->opcode & 0x3f;
        minor = (ctx->opcode >> 6) & 0xf;
        mips32_op = OPC_SLL;
        mips32_op = OPC_SRA;
        mips32_op = OPC_SRL;
        mips32_op = OPC_ROTR;
        gen_shift_imm(ctx, mips32_op, rt, rs, rd);
        check_insn(ctx, ISA_MIPS32R6);
        gen_cond_move(ctx, OPC_SELEQZ, rd, rs, rt);
        check_insn(ctx, ISA_MIPS32R6);
        gen_cond_move(ctx, OPC_SELNEZ, rd, rs, rt);
        check_insn(ctx, ISA_MIPS32R6);
        gen_rdhwr(ctx, rt, rs, extract32(ctx->opcode, 11, 3));
        goto pool32a_invalid;
        minor = (ctx->opcode >> 6) & 0xf;
        mips32_op = OPC_ADD;
        mips32_op = OPC_ADDU;
        mips32_op = OPC_SUB;
        mips32_op = OPC_SUBU;
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        mips32_op = OPC_MUL;
        gen_arith(ctx, mips32_op, rd, rs, rt);
        mips32_op = OPC_SLLV;
        mips32_op = OPC_SRLV;
        mips32_op = OPC_SRAV;
        mips32_op = OPC_ROTRV;
        gen_shift(ctx, mips32_op, rd, rs, rt);
        /* Logical operations */
        mips32_op = OPC_AND;
        mips32_op = OPC_OR;
        mips32_op = OPC_NOR;
        mips32_op = OPC_XOR;
        gen_logic(ctx, mips32_op, rd, rs, rt);
        /* Set less than */
        mips32_op = OPC_SLT;
        mips32_op = OPC_SLTU;
        gen_slt(ctx, mips32_op, rd, rs, rt);
        goto pool32a_invalid;
        minor = (ctx->opcode >> 6) & 0xf;
        /* Conditional moves */
    case MOVN: /* MUL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_r6_muldiv(ctx, R6_OPC_MUL, rd, rs, rt);
            gen_cond_move(ctx, OPC_MOVN, rd, rs, rt);
    case MOVZ: /* MUH */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_r6_muldiv(ctx, R6_OPC_MUH, rd, rs, rt);
            gen_cond_move(ctx, OPC_MOVZ, rd, rs, rt);
        check_insn(ctx, ISA_MIPS32R6);
        gen_r6_muldiv(ctx, R6_OPC_MULU, rd, rs, rt);
        check_insn(ctx, ISA_MIPS32R6);
        gen_r6_muldiv(ctx, R6_OPC_MUHU, rd, rs, rt);
    case LWXS: /* DIV */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_r6_muldiv(ctx, R6_OPC_DIV, rd, rs, rt);
            gen_ldxs(ctx, rs, rt, rd);
        check_insn(ctx, ISA_MIPS32R6);
        gen_r6_muldiv(ctx, R6_OPC_MOD, rd, rs, rt);
        check_insn(ctx, ISA_MIPS32R6);
        gen_r6_muldiv(ctx, R6_OPC_DIVU, rd, rs, rt);
        check_insn(ctx, ISA_MIPS32R6);
        gen_r6_muldiv(ctx, R6_OPC_MODU, rd, rs, rt);
        goto pool32a_invalid;
        gen_bitops(ctx, OPC_INS, rt, rs, rr, rd);
        check_insn(ctx, ISA_MIPS32R6);
        gen_lsa(ctx, OPC_LSA, rd, rs, rt, extract32(ctx->opcode, 9, 2));
        check_insn(ctx, ISA_MIPS32R6);
        gen_align(ctx, OPC_ALIGN, rd, rs, rt, extract32(ctx->opcode, 9, 2));
        gen_bitops(ctx, OPC_EXT, rt, rs, rr, rd);
        gen_pool32axf(env, ctx, rt, rs);
        generate_exception_end(ctx, EXCP_BREAK);
        check_insn(ctx, ISA_MIPS32R6);
        generate_exception_end(ctx, EXCP_RI);
        MIPS_INVAL("pool32a");
        generate_exception_end(ctx, EXCP_RI);
        minor = (ctx->opcode >> 12) & 0xf;
        check_cp0_enabled(ctx);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        /* COP2: Not implemented. */
        generate_exception_err(ctx, EXCP_CpU, 2);
#ifdef TARGET_MIPS64
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ldst_pair(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
#ifdef TARGET_MIPS64
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ldst_multiple(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
        MIPS_INVAL("pool32b");
        generate_exception_end(ctx, EXCP_RI);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            minor = ctx->opcode & 0x3f;
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_ALNV_PS;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MADD_S;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MADD_D;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MADD_PS;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MSUB_S;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MSUB_D;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_MSUB_PS;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_NMADD_S;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_NMADD_D;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_NMADD_PS;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_NMSUB_S;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_NMSUB_D;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            mips32_op = OPC_NMSUB_PS;
            gen_flt3_arith(ctx, mips32_op, rd, rr, rs, rt);
        case CABS_COND_FMT:
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            cond = (ctx->opcode >> 6) & 0xf;
            cc = (ctx->opcode >> 13) & 0x7;
            fmt = (ctx->opcode >> 10) & 0x3;
            gen_cmpabs_s(ctx, cond, rt, rs, cc);
            gen_cmpabs_d(ctx, cond, rt, rs, cc);
            gen_cmpabs_ps(ctx, cond, rt, rs, cc);
            goto pool32f_invalid;
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            cond = (ctx->opcode >> 6) & 0xf;
            cc = (ctx->opcode >> 13) & 0x7;
            fmt = (ctx->opcode >> 10) & 0x3;
            gen_cmp_s(ctx, cond, rt, rs, cc);
            gen_cmp_d(ctx, cond, rt, rs, cc);
            gen_cmp_ps(ctx, cond, rt, rs, cc);
            goto pool32f_invalid;
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_cmp_s(ctx, (ctx->opcode >> 6) & 0x1f, rt, rs, rd);
            check_insn(ctx, ISA_MIPS32R6);
            gen_r6_cmp_d(ctx, (ctx->opcode >> 6) & 0x1f, rt, rs, rd);
            gen_pool32fxf(ctx, rt, rs);
            switch ((ctx->opcode >> 6) & 0x7) {
                mips32_op = OPC_PLL_PS;
                mips32_op = OPC_PLU_PS;
                mips32_op = OPC_PUL_PS;
                mips32_op = OPC_PUU_PS;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_CVT_PS_S;
                gen_farith(ctx, mips32_op, rt, rs, rd, 0);
                goto pool32f_invalid;
14134 check_insn(ctx
, ISA_MIPS32R6
);
14135 switch ((ctx
->opcode
>> 9) & 0x3) {
14137 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14140 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14143 goto pool32f_invalid
;
14148 switch ((ctx
->opcode
>> 6) & 0x7) {
14150 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14151 mips32_op
= OPC_LWXC1
;
14154 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14155 mips32_op
= OPC_SWXC1
;
14158 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14159 mips32_op
= OPC_LDXC1
;
14162 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14163 mips32_op
= OPC_SDXC1
;
14166 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14167 mips32_op
= OPC_LUXC1
;
14170 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14171 mips32_op
= OPC_SUXC1
;
14173 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14176 goto pool32f_invalid
;
14180 check_insn(ctx
, ISA_MIPS32R6
);
14181 switch ((ctx
->opcode
>> 9) & 0x3) {
14183 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14186 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14189 goto pool32f_invalid
;
14194 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14195 fmt
= (ctx
->opcode
>> 9) & 0x3;
14196 switch ((ctx
->opcode
>> 6) & 0x7) {
14200 mips32_op
= OPC_RSQRT2_S
;
14203 mips32_op
= OPC_RSQRT2_D
;
14206 mips32_op
= OPC_RSQRT2_PS
;
14209 goto pool32f_invalid
;
14215 mips32_op
= OPC_RECIP2_S
;
14218 mips32_op
= OPC_RECIP2_D
;
14221 mips32_op
= OPC_RECIP2_PS
;
14224 goto pool32f_invalid
;
14228 mips32_op
= OPC_ADDR_PS
;
14231 mips32_op
= OPC_MULR_PS
;
14233 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14236 goto pool32f_invalid
;
14240 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14241 cc
= (ctx
->opcode
>> 13) & 0x7;
14242 fmt
= (ctx
->opcode
>> 9) & 0x3;
14243 switch ((ctx
->opcode
>> 6) & 0x7) {
14244 case MOVF_FMT
: /* RINT_FMT */
14245 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14249 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14252 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14255 goto pool32f_invalid
;
14261 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14264 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14268 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14271 goto pool32f_invalid
;
14275 case MOVT_FMT
: /* CLASS_FMT */
14276 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14280 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14283 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14286 goto pool32f_invalid
;
14292 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14295 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14299 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14302 goto pool32f_invalid
;
14307 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14310 goto pool32f_invalid
;
14313 #define FINSN_3ARG_SDPS(prfx) \
14314 switch ((ctx->opcode >> 8) & 0x3) { \
14316 mips32_op = OPC_##prfx##_S; \
14319 mips32_op = OPC_##prfx##_D; \
14321 case FMT_SDPS_PS: \
14323 mips32_op = OPC_##prfx##_PS; \
14326 goto pool32f_invalid; \
14329 check_insn(ctx
, ISA_MIPS32R6
);
14330 switch ((ctx
->opcode
>> 9) & 0x3) {
14332 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14335 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14338 goto pool32f_invalid
;
14342 check_insn(ctx
, ISA_MIPS32R6
);
14343 switch ((ctx
->opcode
>> 9) & 0x3) {
14345 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14348 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14351 goto pool32f_invalid
;
14355 /* regular FP ops */
14356 switch ((ctx
->opcode
>> 6) & 0x3) {
14358 FINSN_3ARG_SDPS(ADD
);
14361 FINSN_3ARG_SDPS(SUB
);
14364 FINSN_3ARG_SDPS(MUL
);
14367 fmt
= (ctx
->opcode
>> 8) & 0x3;
14369 mips32_op
= OPC_DIV_D
;
14370 } else if (fmt
== 0) {
14371 mips32_op
= OPC_DIV_S
;
14373 goto pool32f_invalid
;
14377 goto pool32f_invalid
;
14382 switch ((ctx
->opcode
>> 6) & 0x7) {
14383 case MOVN_FMT
: /* SELNEZ_FMT */
14384 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14386 switch ((ctx
->opcode
>> 9) & 0x3) {
14388 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14391 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14394 goto pool32f_invalid
;
14398 FINSN_3ARG_SDPS(MOVN
);
14402 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14403 FINSN_3ARG_SDPS(MOVN
);
14405 case MOVZ_FMT
: /* SELEQZ_FMT */
14406 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14408 switch ((ctx
->opcode
>> 9) & 0x3) {
14410 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14413 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14416 goto pool32f_invalid
;
14420 FINSN_3ARG_SDPS(MOVZ
);
14424 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14425 FINSN_3ARG_SDPS(MOVZ
);
14428 check_insn(ctx
, ISA_MIPS32R6
);
14429 switch ((ctx
->opcode
>> 9) & 0x3) {
14431 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14434 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14437 goto pool32f_invalid
;
14441 check_insn(ctx
, ISA_MIPS32R6
);
14442 switch ((ctx
->opcode
>> 9) & 0x3) {
14444 mips32_op
= OPC_MADDF_S
;
14447 mips32_op
= OPC_MADDF_D
;
14450 goto pool32f_invalid
;
14454 check_insn(ctx
, ISA_MIPS32R6
);
14455 switch ((ctx
->opcode
>> 9) & 0x3) {
14457 mips32_op
= OPC_MSUBF_S
;
14460 mips32_op
= OPC_MSUBF_D
;
14463 goto pool32f_invalid
;
14467 goto pool32f_invalid
;
14471 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14475 MIPS_INVAL("pool32f");
14476 generate_exception_end(ctx
, EXCP_RI
);
14480 generate_exception_err(ctx
, EXCP_CpU
, 1);
14484 minor
= (ctx
->opcode
>> 21) & 0x1f;
14487 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14488 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14491 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14492 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14493 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14496 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14497 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14498 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14501 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14502 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14505 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14506 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14507 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14510 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14511 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14512 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14515 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14516 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14519 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14520 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14524 case TLTI
: /* BC1EQZC */
14525 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14527 check_cp1_enabled(ctx
);
14528 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14531 mips32_op
= OPC_TLTI
;
14535 case TGEI
: /* BC1NEZC */
14536 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14538 check_cp1_enabled(ctx
);
14539 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14542 mips32_op
= OPC_TGEI
;
14547 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14548 mips32_op
= OPC_TLTIU
;
14551 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14552 mips32_op
= OPC_TGEIU
;
14554 case TNEI
: /* SYNCI */
14555 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14557 /* Break the TB to be able to sync copied instructions
14559 ctx
->bstate
= BS_STOP
;
14562 mips32_op
= OPC_TNEI
;
14567 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14568 mips32_op
= OPC_TEQI
;
14570 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14575 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14576 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14577 4, rs
, 0, imm
<< 1, 0);
14578 /* Compact branches don't have a delay slot, so just let
14579 the normal delay slot handling take us to the branch
14583 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14584 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14587 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14588 /* Break the TB to be able to sync copied instructions
14590 ctx
->bstate
= BS_STOP
;
14594 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14595 /* COP2: Not implemented. */
14596 generate_exception_err(ctx
, EXCP_CpU
, 2);
14599 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14600 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14603 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14604 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14607 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14608 mips32_op
= OPC_BC1FANY4
;
14611 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14612 mips32_op
= OPC_BC1TANY4
;
14615 check_insn(ctx
, ASE_MIPS3D
);
14618 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14619 check_cp1_enabled(ctx
);
14620 gen_compute_branch1(ctx
, mips32_op
,
14621 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14623 generate_exception_err(ctx
, EXCP_CpU
, 1);
14628 /* MIPS DSP: not implemented */
14631 MIPS_INVAL("pool32i");
14632 generate_exception_end(ctx
, EXCP_RI
);
14637 minor
= (ctx
->opcode
>> 12) & 0xf;
14638 offset
= sextract32(ctx
->opcode
, 0,
14639 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14642 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14643 mips32_op
= OPC_LWL
;
14646 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14647 mips32_op
= OPC_SWL
;
14650 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14651 mips32_op
= OPC_LWR
;
14654 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14655 mips32_op
= OPC_SWR
;
14657 #if defined(TARGET_MIPS64)
14659 check_insn(ctx
, ISA_MIPS3
);
14660 check_mips_64(ctx
);
14661 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14662 mips32_op
= OPC_LDL
;
14665 check_insn(ctx
, ISA_MIPS3
);
14666 check_mips_64(ctx
);
14667 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14668 mips32_op
= OPC_SDL
;
14671 check_insn(ctx
, ISA_MIPS3
);
14672 check_mips_64(ctx
);
14673 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14674 mips32_op
= OPC_LDR
;
14677 check_insn(ctx
, ISA_MIPS3
);
14678 check_mips_64(ctx
);
14679 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14680 mips32_op
= OPC_SDR
;
14683 check_insn(ctx
, ISA_MIPS3
);
14684 check_mips_64(ctx
);
14685 mips32_op
= OPC_LWU
;
14688 check_insn(ctx
, ISA_MIPS3
);
14689 check_mips_64(ctx
);
14690 mips32_op
= OPC_LLD
;
14694 mips32_op
= OPC_LL
;
14697 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14700 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14703 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14705 #if defined(TARGET_MIPS64)
14707 check_insn(ctx
, ISA_MIPS3
);
14708 check_mips_64(ctx
);
14709 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14713 /* Treat as no-op */
14714 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14715 /* hint codes 24-31 are reserved and signal RI */
14716 generate_exception(ctx
, EXCP_RI
);
14720 MIPS_INVAL("pool32c");
14721 generate_exception_end(ctx
, EXCP_RI
);
14725 case ADDI32
: /* AUI, LUI */
14726 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14728 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14731 mips32_op
= OPC_ADDI
;
14736 mips32_op
= OPC_ADDIU
;
14738 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14741 /* Logical operations */
14743 mips32_op
= OPC_ORI
;
14746 mips32_op
= OPC_XORI
;
14749 mips32_op
= OPC_ANDI
;
14751 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14754 /* Set less than immediate */
14756 mips32_op
= OPC_SLTI
;
14759 mips32_op
= OPC_SLTIU
;
14761 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14764 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14765 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14766 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14767 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14769 case JALS32
: /* BOVC, BEQC, BEQZALC */
14770 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14773 mips32_op
= OPC_BOVC
;
14774 } else if (rs
< rt
&& rs
== 0) {
14776 mips32_op
= OPC_BEQZALC
;
14779 mips32_op
= OPC_BEQC
;
14781 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14784 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14785 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14786 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14789 case BEQ32
: /* BC */
14790 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14792 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14793 sextract32(ctx
->opcode
<< 1, 0, 27));
14796 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14799 case BNE32
: /* BALC */
14800 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14802 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14803 sextract32(ctx
->opcode
<< 1, 0, 27));
14806 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14809 case J32
: /* BGTZC, BLTZC, BLTC */
14810 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14811 if (rs
== 0 && rt
!= 0) {
14813 mips32_op
= OPC_BGTZC
;
14814 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14816 mips32_op
= OPC_BLTZC
;
14819 mips32_op
= OPC_BLTC
;
14821 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14824 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14825 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14828 case JAL32
: /* BLEZC, BGEZC, BGEC */
14829 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14830 if (rs
== 0 && rt
!= 0) {
14832 mips32_op
= OPC_BLEZC
;
14833 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14835 mips32_op
= OPC_BGEZC
;
14838 mips32_op
= OPC_BGEC
;
14840 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14843 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14844 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14845 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14848 /* Floating point (COP1) */
14850 mips32_op
= OPC_LWC1
;
14853 mips32_op
= OPC_LDC1
;
14856 mips32_op
= OPC_SWC1
;
14859 mips32_op
= OPC_SDC1
;
14861 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14863 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14864 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14865 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14866 switch ((ctx
->opcode
>> 16) & 0x1f) {
14867 case ADDIUPC_00
... ADDIUPC_07
:
14868 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14871 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14874 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14876 case LWPC_08
... LWPC_0F
:
14877 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14880 generate_exception(ctx
, EXCP_RI
);
14885 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14886 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14888 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14891 case BNVC
: /* BNEC, BNEZALC */
14892 check_insn(ctx
, ISA_MIPS32R6
);
14895 mips32_op
= OPC_BNVC
;
14896 } else if (rs
< rt
&& rs
== 0) {
14898 mips32_op
= OPC_BNEZALC
;
14901 mips32_op
= OPC_BNEC
;
14903 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14905 case R6_BNEZC
: /* JIALC */
14906 check_insn(ctx
, ISA_MIPS32R6
);
14909 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14910 sextract32(ctx
->opcode
<< 1, 0, 22));
14913 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14916 case R6_BEQZC
: /* JIC */
14917 check_insn(ctx
, ISA_MIPS32R6
);
14920 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14921 sextract32(ctx
->opcode
<< 1, 0, 22));
14924 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14927 case BLEZALC
: /* BGEZALC, BGEUC */
14928 check_insn(ctx
, ISA_MIPS32R6
);
14929 if (rs
== 0 && rt
!= 0) {
14931 mips32_op
= OPC_BLEZALC
;
14932 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14934 mips32_op
= OPC_BGEZALC
;
14937 mips32_op
= OPC_BGEUC
;
14939 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14941 case BGTZALC
: /* BLTZALC, BLTUC */
14942 check_insn(ctx
, ISA_MIPS32R6
);
14943 if (rs
== 0 && rt
!= 0) {
14945 mips32_op
= OPC_BGTZALC
;
14946 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14948 mips32_op
= OPC_BLTZALC
;
14951 mips32_op
= OPC_BLTUC
;
14953 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14955 /* Loads and stores */
14957 mips32_op
= OPC_LB
;
14960 mips32_op
= OPC_LBU
;
14963 mips32_op
= OPC_LH
;
14966 mips32_op
= OPC_LHU
;
14969 mips32_op
= OPC_LW
;
14971 #ifdef TARGET_MIPS64
14973 check_insn(ctx
, ISA_MIPS3
);
14974 check_mips_64(ctx
);
14975 mips32_op
= OPC_LD
;
14978 check_insn(ctx
, ISA_MIPS3
);
14979 check_mips_64(ctx
);
14980 mips32_op
= OPC_SD
;
14984 mips32_op
= OPC_SB
;
14987 mips32_op
= OPC_SH
;
14990 mips32_op
= OPC_SW
;
14993 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14996 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14999 generate_exception_end(ctx
, EXCP_RI
);
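/*
 * Top-level microMIPS decoder: it checks halfword alignment of the PC,
 * enforces the delay-slot size restrictions signalled by
 * MIPS_HFLAG_BDS_STRICT, decodes the 16-bit encodings directly, and hands
 * 32-bit encodings to decode_micromips32_opc().
 */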
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
{
    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
        /* POOL32A, POOL32B, POOL32I, POOL32C */
        /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
        /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
        /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
        /* LB32, LH32, LWC132, LDC132, LW32 */
        if (ctx->hflags & MIPS_HFLAG_BDS16) {
            generate_exception_end(ctx, EXCP_RI);
        /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
        /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
        /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
        if (ctx->hflags & MIPS_HFLAG_BDS32) {
            generate_exception_end(ctx, EXCP_RI);
15051 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15052 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15053 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15056 switch (ctx
->opcode
& 0x1) {
15064 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15065 /* In the Release 6 the register number location in
15066 * the instruction encoding has changed.
15068 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15070 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
15076 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15077 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15078 int amount
= (ctx
->opcode
>> 1) & 0x7;
15080 amount
= amount
== 0 ? 8 : amount
;
15082 switch (ctx
->opcode
& 0x1) {
15091 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15095 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15096 gen_pool16c_r6_insn(ctx
);
15098 gen_pool16c_insn(ctx
);
15103 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15104 int rb
= 28; /* GP */
15105 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15107 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15111 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15112 if (ctx
->opcode
& 1) {
15113 generate_exception_end(ctx
, EXCP_RI
);
15116 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15117 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15118 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15119 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15124 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15125 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15126 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15127 offset
= (offset
== 0xf ? -1 : offset
);
15129 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15134 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15135 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15136 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15138 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15143 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15144 int rb
= 29; /* SP */
15145 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15147 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15152 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15153 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15154 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15156 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15161 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15162 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15163 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15165 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15170 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15171 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15172 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15174 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15179 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15180 int rb
= 29; /* SP */
15181 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15183 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15188 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15189 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15190 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15192 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15197 int rd
= uMIPS_RD5(ctx
->opcode
);
15198 int rs
= uMIPS_RS5(ctx
->opcode
);
15200 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15207 switch (ctx
->opcode
& 0x1) {
15217 switch (ctx
->opcode
& 0x1) {
15222 gen_addiur1sp(ctx
);
15226 case B16
: /* BC16 */
15227 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15228 sextract32(ctx
->opcode
, 0, 10) << 1,
15229 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15231 case BNEZ16
: /* BNEZC16 */
15232 case BEQZ16
: /* BEQZC16 */
15233 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15234 mmreg(uMIPS_RD(ctx
->opcode
)),
15235 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15236 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15241 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15242 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15244 imm
= (imm
== 0x7f ? -1 : imm
);
15245 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15251 generate_exception_end(ctx
, EXCP_RI
);
15254 decode_micromips32_opc(env
, ctx
);
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */

/* MIPSDSP functions. */
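/*
 * DSP load helper: the effective address is base+offset computed with
 * gen_op_addr_add(), with the degenerate cases where base or offset is
 * register 0 handled by reading the other register directly; the value is
 * then loaded at the width selected by opc (byte, halfword, word or, under
 * TARGET_MIPS64, doubleword) and written back to rd.
 */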
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
{
    t0 = tcg_temp_new();

        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);

    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
    gen_store_gpr(t0, rd);
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
    gen_store_gpr(t0, rd);
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
    gen_store_gpr(t0, rd);
#if defined(TARGET_MIPS64)
    tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
    gen_store_gpr(t0, rd);
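/*
 * DSP arithmetic helper: loads the two source GPRs into TCG temporaries and
 * dispatches on op1/op2 to the matching gen_helper_* routine (the ADDU.QB,
 * ABSQ_S.PH and CMPU.EQ.QB families and, under TARGET_MIPS64, their 64-bit
 * counterparts), writing the result to cpu_gpr[ret].
 */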
static void gen_mipsdsp_arith(DisasContext *ctx, uint32_t op1, uint32_t op2,
                              int ret, int v1, int v2)
{
        /* Treat as NOP. */
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
15327 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15328 case OPC_MULT_G_2E
:
15332 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15334 case OPC_ADDUH_R_QB
:
15335 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15338 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15340 case OPC_ADDQH_R_PH
:
15341 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15344 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15346 case OPC_ADDQH_R_W
:
15347 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15350 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15352 case OPC_SUBUH_R_QB
:
15353 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15356 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15358 case OPC_SUBQH_R_PH
:
15359 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15362 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15364 case OPC_SUBQH_R_W
:
15365 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15369 case OPC_ABSQ_S_PH_DSP
:
15371 case OPC_ABSQ_S_QB
:
15373 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15375 case OPC_ABSQ_S_PH
:
15377 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15381 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15383 case OPC_PRECEQ_W_PHL
:
15385 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15386 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15388 case OPC_PRECEQ_W_PHR
:
15390 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15391 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15392 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15394 case OPC_PRECEQU_PH_QBL
:
15396 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15398 case OPC_PRECEQU_PH_QBR
:
15400 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15402 case OPC_PRECEQU_PH_QBLA
:
15404 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15406 case OPC_PRECEQU_PH_QBRA
:
15408 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15410 case OPC_PRECEU_PH_QBL
:
15412 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15414 case OPC_PRECEU_PH_QBR
:
15416 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15418 case OPC_PRECEU_PH_QBLA
:
15420 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15422 case OPC_PRECEU_PH_QBRA
:
15424 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15428 case OPC_ADDU_QB_DSP
:
15432 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15434 case OPC_ADDQ_S_PH
:
15436 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15440 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15444 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15446 case OPC_ADDU_S_QB
:
15448 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15452 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15454 case OPC_ADDU_S_PH
:
15456 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15460 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15462 case OPC_SUBQ_S_PH
:
15464 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15468 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15472 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15474 case OPC_SUBU_S_QB
:
15476 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15480 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15482 case OPC_SUBU_S_PH
:
15484 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15488 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15492 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15496 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15498 case OPC_RADDU_W_QB
:
15500 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15504 case OPC_CMPU_EQ_QB_DSP
:
15506 case OPC_PRECR_QB_PH
:
15508 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15510 case OPC_PRECRQ_QB_PH
:
15512 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15514 case OPC_PRECR_SRA_PH_W
:
15517 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15518 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15520 tcg_temp_free_i32(sa_t
);
15523 case OPC_PRECR_SRA_R_PH_W
:
15526 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15527 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15529 tcg_temp_free_i32(sa_t
);
15532 case OPC_PRECRQ_PH_W
:
15534 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15536 case OPC_PRECRQ_RS_PH_W
:
15538 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15540 case OPC_PRECRQU_S_QB_PH
:
15542 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15546 #ifdef TARGET_MIPS64
15547 case OPC_ABSQ_S_QH_DSP
:
15549 case OPC_PRECEQ_L_PWL
:
15551 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15553 case OPC_PRECEQ_L_PWR
:
15555 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15557 case OPC_PRECEQ_PW_QHL
:
15559 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15561 case OPC_PRECEQ_PW_QHR
:
15563 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15565 case OPC_PRECEQ_PW_QHLA
:
15567 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15569 case OPC_PRECEQ_PW_QHRA
:
15571 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15573 case OPC_PRECEQU_QH_OBL
:
15575 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15577 case OPC_PRECEQU_QH_OBR
:
15579 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15581 case OPC_PRECEQU_QH_OBLA
:
15583 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15585 case OPC_PRECEQU_QH_OBRA
:
15587 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15589 case OPC_PRECEU_QH_OBL
:
15591 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15593 case OPC_PRECEU_QH_OBR
:
15595 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15597 case OPC_PRECEU_QH_OBLA
:
15599 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15601 case OPC_PRECEU_QH_OBRA
:
15603 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15605 case OPC_ABSQ_S_OB
:
15607 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15609 case OPC_ABSQ_S_PW
:
15611 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15613 case OPC_ABSQ_S_QH
:
15615 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15619 case OPC_ADDU_OB_DSP
:
15621 case OPC_RADDU_L_OB
:
15623 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15627 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15629 case OPC_SUBQ_S_PW
:
15631 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15635 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15637 case OPC_SUBQ_S_QH
:
15639 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15643 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15645 case OPC_SUBU_S_OB
:
15647 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15651 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15653 case OPC_SUBU_S_QH
:
15655 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15659 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15661 case OPC_SUBUH_R_OB
:
15663 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15667 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15669 case OPC_ADDQ_S_PW
:
15671 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15675 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15677 case OPC_ADDQ_S_QH
:
15679 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15683 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15685 case OPC_ADDU_S_OB
:
15687 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15691 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15693 case OPC_ADDU_S_QH
:
15695 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15699 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15701 case OPC_ADDUH_R_OB
:
15703 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15707 case OPC_CMPU_EQ_OB_DSP
:
15709 case OPC_PRECR_OB_QH
:
15711 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15713 case OPC_PRECR_SRA_QH_PW
:
15716 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15717 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15718 tcg_temp_free_i32(ret_t
);
15721 case OPC_PRECR_SRA_R_QH_PW
:
15724 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15725 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15726 tcg_temp_free_i32(sa_v
);
15729 case OPC_PRECRQ_OB_QH
:
15731 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15733 case OPC_PRECRQ_PW_L
:
15735 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15737 case OPC_PRECRQ_QH_PW
:
15739 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15741 case OPC_PRECRQ_RS_QH_PW
:
15743 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15745 case OPC_PRECRQU_S_OB_QH
:
15747 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15754 tcg_temp_free(v1_t
);
15755 tcg_temp_free(v2_t
);
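/*
 * DSP shift helper: t0 holds the immediate shift amount (v1) while v1_t/v2_t
 * hold the source registers, so each SHLL/SHRL/SHRA variant can pick either
 * the immediate or the register form before calling the corresponding helper.
 */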
static void gen_mipsdsp_shift(DisasContext *ctx, uint32_t opc,
                              int ret, int v1, int v2)
{
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_tl(t0, v1);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
15780 case OPC_SHLL_QB_DSP
:
15782 op2
= MASK_SHLL_QB(ctx
->opcode
);
15786 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15790 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15794 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15798 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15800 case OPC_SHLL_S_PH
:
15802 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15804 case OPC_SHLLV_S_PH
:
15806 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15810 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15812 case OPC_SHLLV_S_W
:
15814 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15818 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15822 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15826 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15830 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15834 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15836 case OPC_SHRA_R_QB
:
15838 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15842 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15844 case OPC_SHRAV_R_QB
:
15846 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15850 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15852 case OPC_SHRA_R_PH
:
15854 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15858 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15860 case OPC_SHRAV_R_PH
:
15862 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15866 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15868 case OPC_SHRAV_R_W
:
15870 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15872 default: /* Invalid */
15873 MIPS_INVAL("MASK SHLL.QB");
15874 generate_exception_end(ctx
, EXCP_RI
);
15879 #ifdef TARGET_MIPS64
15880 case OPC_SHLL_OB_DSP
:
15881 op2
= MASK_SHLL_OB(ctx
->opcode
);
15885 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15889 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15891 case OPC_SHLL_S_PW
:
15893 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15895 case OPC_SHLLV_S_PW
:
15897 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15901 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15905 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15909 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15913 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15915 case OPC_SHLL_S_QH
:
15917 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15919 case OPC_SHLLV_S_QH
:
15921 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15925 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15929 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15931 case OPC_SHRA_R_OB
:
15933 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15935 case OPC_SHRAV_R_OB
:
15937 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15941 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15945 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15947 case OPC_SHRA_R_PW
:
15949 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15951 case OPC_SHRAV_R_PW
:
15953 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15957 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15961 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15963 case OPC_SHRA_R_QH
:
15965 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15967 case OPC_SHRAV_R_QH
:
15969 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15973 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15977 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15981 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15985 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15987 default: /* Invalid */
15988 MIPS_INVAL("MASK SHLL.OB");
15989 generate_exception_end(ctx
, EXCP_RI
);
15997 tcg_temp_free(v1_t
);
15998 tcg_temp_free(v2_t
);
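/*
 * DSP multiply/accumulate helper: t0 carries the accumulator/return index and
 * v1_t/v2_t the operands; the switch selects between the MUL*, DPA*/DPS*,
 * MAQ* and MULEQ/MULEU families (plus the TARGET_MIPS64 vector forms). When
 * check_ret is set and the destination is $0 the instruction is treated as a
 * NOP.
 */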
static void gen_mipsdsp_multiply(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                 int ret, int v1, int v2, int check_ret)
{
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */

    t0 = tcg_temp_new_i32();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    tcg_gen_movi_i32(t0, ret);
    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
16022 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
16023 * the same mask and op1. */
16024 case OPC_MULT_G_2E
:
16028 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16031 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16034 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16036 case OPC_MULQ_RS_W
:
16037 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16041 case OPC_DPA_W_PH_DSP
:
16043 case OPC_DPAU_H_QBL
:
16045 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16047 case OPC_DPAU_H_QBR
:
16049 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16051 case OPC_DPSU_H_QBL
:
16053 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16055 case OPC_DPSU_H_QBR
:
16057 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16061 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16063 case OPC_DPAX_W_PH
:
16065 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16067 case OPC_DPAQ_S_W_PH
:
16069 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16071 case OPC_DPAQX_S_W_PH
:
16073 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16075 case OPC_DPAQX_SA_W_PH
:
16077 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16081 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16083 case OPC_DPSX_W_PH
:
16085 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16087 case OPC_DPSQ_S_W_PH
:
16089 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16091 case OPC_DPSQX_S_W_PH
:
16093 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16095 case OPC_DPSQX_SA_W_PH
:
16097 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16099 case OPC_MULSAQ_S_W_PH
:
16101 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16103 case OPC_DPAQ_SA_L_W
:
16105 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16107 case OPC_DPSQ_SA_L_W
:
16109 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16111 case OPC_MAQ_S_W_PHL
:
16113 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16115 case OPC_MAQ_S_W_PHR
:
16117 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16119 case OPC_MAQ_SA_W_PHL
:
16121 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16123 case OPC_MAQ_SA_W_PHR
:
16125 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16127 case OPC_MULSA_W_PH
:
16129 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16133 #ifdef TARGET_MIPS64
16134 case OPC_DPAQ_W_QH_DSP
:
16136 int ac
= ret
& 0x03;
16137 tcg_gen_movi_i32(t0
, ac
);
16142 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16146 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16150 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16154 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16158 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16160 case OPC_DPAQ_S_W_QH
:
16162 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16164 case OPC_DPAQ_SA_L_PW
:
16166 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16168 case OPC_DPAU_H_OBL
:
16170 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16172 case OPC_DPAU_H_OBR
:
16174 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16178 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16180 case OPC_DPSQ_S_W_QH
:
16182 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16184 case OPC_DPSQ_SA_L_PW
:
16186 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16188 case OPC_DPSU_H_OBL
:
16190 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16192 case OPC_DPSU_H_OBR
:
16194 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16196 case OPC_MAQ_S_L_PWL
:
16198 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16200 case OPC_MAQ_S_L_PWR
:
16202 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16204 case OPC_MAQ_S_W_QHLL
:
16206 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16208 case OPC_MAQ_SA_W_QHLL
:
16210 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16212 case OPC_MAQ_S_W_QHLR
:
16214 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16216 case OPC_MAQ_SA_W_QHLR
:
16218 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16220 case OPC_MAQ_S_W_QHRL
:
16222 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16224 case OPC_MAQ_SA_W_QHRL
:
16226 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16228 case OPC_MAQ_S_W_QHRR
:
16230 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16232 case OPC_MAQ_SA_W_QHRR
:
16234 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16236 case OPC_MULSAQ_S_L_PW
:
16238 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16240 case OPC_MULSAQ_S_W_QH
:
16242 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16248 case OPC_ADDU_QB_DSP
:
16250 case OPC_MULEU_S_PH_QBL
:
16252 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16254 case OPC_MULEU_S_PH_QBR
:
16256 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16258 case OPC_MULQ_RS_PH
:
16260 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16262 case OPC_MULEQ_S_W_PHL
:
16264 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16266 case OPC_MULEQ_S_W_PHR
:
16268 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16270 case OPC_MULQ_S_PH
:
16272 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16276 #ifdef TARGET_MIPS64
16277 case OPC_ADDU_OB_DSP
:
16279 case OPC_MULEQ_S_PW_QHL
:
16281 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16283 case OPC_MULEQ_S_PW_QHR
:
16285 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16287 case OPC_MULEU_S_QH_OBL
:
16289 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16291 case OPC_MULEU_S_QH_OBR
:
16293 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16295 case OPC_MULQ_RS_QH
:
16297 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16304 tcg_temp_free_i32(t0
);
16305 tcg_temp_free(v1_t
);
16306 tcg_temp_free(v2_t
);
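/*
 * DSP bit/replicate helper (BITREV plus the REPL/REPLV-style immediate and
 * register forms): builds replicated byte/halfword patterns either from an
 * immediate taken out of ctx->opcode or from the value register, and writes
 * them to cpu_gpr[ret].
 */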
static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    val_t = tcg_temp_new();
    gen_load_gpr(val_t, val);
16326 case OPC_ABSQ_S_PH_DSP
:
16330 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16335 target_long result
;
16336 imm
= (ctx
->opcode
>> 16) & 0xFF;
16337 result
= (uint32_t)imm
<< 24 |
16338 (uint32_t)imm
<< 16 |
16339 (uint32_t)imm
<< 8 |
16341 result
= (int32_t)result
;
16342 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16347 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16348 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16349 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16350 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16351 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16352 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16357 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16358 imm
= (int16_t)(imm
<< 6) >> 6;
16359 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16360 (target_long
)((int32_t)imm
<< 16 | \
16366 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16367 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16368 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16369 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16373 #ifdef TARGET_MIPS64
16374 case OPC_ABSQ_S_QH_DSP
:
16381 imm
= (ctx
->opcode
>> 16) & 0xFF;
16382 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16383 temp
= (temp
<< 16) | temp
;
16384 temp
= (temp
<< 32) | temp
;
16385 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16393 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16394 imm
= (int16_t)(imm
<< 6) >> 6;
16395 temp
= ((target_long
)imm
<< 32) \
16396 | ((target_long
)imm
& 0xFFFFFFFF);
16397 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16405 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16406 imm
= (int16_t)(imm
<< 6) >> 6;
16408 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16409 ((uint64_t)(uint16_t)imm
<< 32) |
16410 ((uint64_t)(uint16_t)imm
<< 16) |
16411 (uint64_t)(uint16_t)imm
;
16412 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16417 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16418 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16419 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16420 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16421 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16422 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16423 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16427 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16428 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16429 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16433 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16434 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16435 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16436 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16437 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16444 tcg_temp_free(val_t
);
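/*
 * DSP compare/pick helper: the CMP* variants only update the DSPControl
 * condition bits (their helpers take cpu_env and no GPR destination), while
 * the CMPG*, PICK* and PACKRL variants also produce a GPR result; CMPGDU*
 * additionally mirrors the comparison mask into bits 24..27 of cpu_dspctrl.
 */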
static void gen_mipsdsp_add_cmp_pick(DisasContext *ctx,
                                     uint32_t op1, uint32_t op2,
                                     int ret, int v1, int v2, int check_ret)
{
    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */

    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);
16468 case OPC_CMPU_EQ_QB_DSP
:
16470 case OPC_CMPU_EQ_QB
:
16472 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16474 case OPC_CMPU_LT_QB
:
16476 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16478 case OPC_CMPU_LE_QB
:
16480 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16482 case OPC_CMPGU_EQ_QB
:
16484 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16486 case OPC_CMPGU_LT_QB
:
16488 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16490 case OPC_CMPGU_LE_QB
:
16492 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16494 case OPC_CMPGDU_EQ_QB
:
16496 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16497 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16498 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16499 tcg_gen_shli_tl(t1
, t1
, 24);
16500 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16502 case OPC_CMPGDU_LT_QB
:
16504 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16505 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16506 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16507 tcg_gen_shli_tl(t1
, t1
, 24);
16508 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16510 case OPC_CMPGDU_LE_QB
:
16512 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16513 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16514 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16515 tcg_gen_shli_tl(t1
, t1
, 24);
16516 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16518 case OPC_CMP_EQ_PH
:
16520 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16522 case OPC_CMP_LT_PH
:
16524 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16526 case OPC_CMP_LE_PH
:
16528 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16532 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16536 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16538 case OPC_PACKRL_PH
:
16540 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16544 #ifdef TARGET_MIPS64
16545 case OPC_CMPU_EQ_OB_DSP
:
16547 case OPC_CMP_EQ_PW
:
16549 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16551 case OPC_CMP_LT_PW
:
16553 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16555 case OPC_CMP_LE_PW
:
16557 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16559 case OPC_CMP_EQ_QH
:
16561 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16563 case OPC_CMP_LT_QH
:
16565 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16567 case OPC_CMP_LE_QH
:
16569 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16571 case OPC_CMPGDU_EQ_OB
:
16573 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16575 case OPC_CMPGDU_LT_OB
:
16577 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16579 case OPC_CMPGDU_LE_OB
:
16581 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16583 case OPC_CMPGU_EQ_OB
:
16585 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16587 case OPC_CMPGU_LT_OB
:
16589 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16591 case OPC_CMPGU_LE_OB
:
16593 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16595 case OPC_CMPU_EQ_OB
:
16597 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16599 case OPC_CMPU_LT_OB
:
16601 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16603 case OPC_CMPU_LE_OB
:
16605 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16607 case OPC_PACKRL_PW
:
16609 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16613 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16617 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16621 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16629 tcg_temp_free(v1_t
);
16630 tcg_temp_free(v2_t
);
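/*
 * APPEND/PREPEND/BALIGN helper (and the TARGET_MIPS64 DAPPEND forms): shifts
 * the rt destination and merges in bits taken from rs according to the sa
 * field, sign-extending 32-bit results; invalid sub-opcodes raise EXCP_RI.
 */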
static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
                               uint32_t op1, int rt, int rs, int sa)
{
        /* Treat as NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
16649 case OPC_APPEND_DSP
:
16650 switch (MASK_APPEND(ctx
->opcode
)) {
16653 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16655 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16659 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16660 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16661 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16662 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16664 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16668 if (sa
!= 0 && sa
!= 2) {
16669 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16670 tcg_gen_ext32u_tl(t0
, t0
);
16671 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16672 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16674 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16676 default: /* Invalid */
16677 MIPS_INVAL("MASK APPEND");
16678 generate_exception_end(ctx
, EXCP_RI
);
16682 #ifdef TARGET_MIPS64
16683 case OPC_DAPPEND_DSP
:
16684 switch (MASK_DAPPEND(ctx
->opcode
)) {
16687 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16691 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16692 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16693 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16697 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16698 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16699 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16704 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16705 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16706 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16707 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16710 default: /* Invalid */
16711 MIPS_INVAL("MASK DAPPEND");
16712 generate_exception_end(ctx
, EXCP_RI
);
static void gen_mipsdsp_accinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
                                int ret, int v1, int v2, int check_ret)
{
    TCGv t0;
    TCGv t1;
    TCGv v1_t;
    TCGv v2_t;
    int16_t imm;

    if ((ret == 0) && (check_ret == 1)) {
        /* Treat as NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    v1_t = tcg_temp_new();
    v2_t = tcg_temp_new();

    gen_load_gpr(v1_t, v1);
    gen_load_gpr(v2_t, v2);

    switch (op1) {
16745 case OPC_EXTR_W_DSP
:
16749 tcg_gen_movi_tl(t0
, v2
);
16750 tcg_gen_movi_tl(t1
, v1
);
16751 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16754 tcg_gen_movi_tl(t0
, v2
);
16755 tcg_gen_movi_tl(t1
, v1
);
16756 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16758 case OPC_EXTR_RS_W
:
16759 tcg_gen_movi_tl(t0
, v2
);
16760 tcg_gen_movi_tl(t1
, v1
);
16761 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16764 tcg_gen_movi_tl(t0
, v2
);
16765 tcg_gen_movi_tl(t1
, v1
);
16766 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16768 case OPC_EXTRV_S_H
:
16769 tcg_gen_movi_tl(t0
, v2
);
16770 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16773 tcg_gen_movi_tl(t0
, v2
);
16774 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16776 case OPC_EXTRV_R_W
:
16777 tcg_gen_movi_tl(t0
, v2
);
16778 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16780 case OPC_EXTRV_RS_W
:
16781 tcg_gen_movi_tl(t0
, v2
);
16782 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16785 tcg_gen_movi_tl(t0
, v2
);
16786 tcg_gen_movi_tl(t1
, v1
);
16787 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16790 tcg_gen_movi_tl(t0
, v2
);
16791 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16794 tcg_gen_movi_tl(t0
, v2
);
16795 tcg_gen_movi_tl(t1
, v1
);
16796 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16799 tcg_gen_movi_tl(t0
, v2
);
16800 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16803 imm
= (ctx
->opcode
>> 20) & 0x3F;
16804 tcg_gen_movi_tl(t0
, ret
);
16805 tcg_gen_movi_tl(t1
, imm
);
16806 gen_helper_shilo(t0
, t1
, cpu_env
);
16809 tcg_gen_movi_tl(t0
, ret
);
16810 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16813 tcg_gen_movi_tl(t0
, ret
);
16814 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16817 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16818 tcg_gen_movi_tl(t0
, imm
);
16819 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16822 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16823 tcg_gen_movi_tl(t0
, imm
);
16824 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16828 #ifdef TARGET_MIPS64
16829 case OPC_DEXTR_W_DSP
:
16833 tcg_gen_movi_tl(t0
, ret
);
16834 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16838 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16839 int ac
= (ctx
->opcode
>> 11) & 0x03;
16840 tcg_gen_movi_tl(t0
, shift
);
16841 tcg_gen_movi_tl(t1
, ac
);
16842 gen_helper_dshilo(t0
, t1
, cpu_env
);
16847 int ac
= (ctx
->opcode
>> 11) & 0x03;
16848 tcg_gen_movi_tl(t0
, ac
);
16849 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16853 tcg_gen_movi_tl(t0
, v2
);
16854 tcg_gen_movi_tl(t1
, v1
);
16856 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16859 tcg_gen_movi_tl(t0
, v2
);
16860 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16863 tcg_gen_movi_tl(t0
, v2
);
16864 tcg_gen_movi_tl(t1
, v1
);
16865 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16868 tcg_gen_movi_tl(t0
, v2
);
16869 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16872 tcg_gen_movi_tl(t0
, v2
);
16873 tcg_gen_movi_tl(t1
, v1
);
16874 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16876 case OPC_DEXTR_R_L
:
16877 tcg_gen_movi_tl(t0
, v2
);
16878 tcg_gen_movi_tl(t1
, v1
);
16879 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16881 case OPC_DEXTR_RS_L
:
16882 tcg_gen_movi_tl(t0
, v2
);
16883 tcg_gen_movi_tl(t1
, v1
);
16884 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16887 tcg_gen_movi_tl(t0
, v2
);
16888 tcg_gen_movi_tl(t1
, v1
);
16889 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16891 case OPC_DEXTR_R_W
:
16892 tcg_gen_movi_tl(t0
, v2
);
16893 tcg_gen_movi_tl(t1
, v1
);
16894 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16896 case OPC_DEXTR_RS_W
:
16897 tcg_gen_movi_tl(t0
, v2
);
16898 tcg_gen_movi_tl(t1
, v1
);
16899 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16901 case OPC_DEXTR_S_H
:
16902 tcg_gen_movi_tl(t0
, v2
);
16903 tcg_gen_movi_tl(t1
, v1
);
16904 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16906 case OPC_DEXTRV_S_H
:
16907 tcg_gen_movi_tl(t0
, v2
);
16908 tcg_gen_movi_tl(t1
, v1
);
16909 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16912 tcg_gen_movi_tl(t0
, v2
);
16913 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16915 case OPC_DEXTRV_R_L
:
16916 tcg_gen_movi_tl(t0
, v2
);
16917 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16919 case OPC_DEXTRV_RS_L
:
16920 tcg_gen_movi_tl(t0
, v2
);
16921 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16924 tcg_gen_movi_tl(t0
, v2
);
16925 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16927 case OPC_DEXTRV_R_W
:
16928 tcg_gen_movi_tl(t0
, v2
);
16929 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16931 case OPC_DEXTRV_RS_W
:
16932 tcg_gen_movi_tl(t0
, v2
);
16933 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16942 tcg_temp_free(v1_t
);
16943 tcg_temp_free(v2_t
);
16946 /* End MIPSDSP functions. */
static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_LSA:
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        break;
    case OPC_MULT ... OPC_DIVU:
        op2 = MASK_R6_MULDIV(ctx->opcode);
        switch (op2) {
        case R6_OPC_MUL:
        case R6_OPC_MUH:
        case R6_OPC_MULU:
        case R6_OPC_MUHU:
        case R6_OPC_DIV:
        case R6_OPC_MOD:
        case R6_OPC_DIVU:
        case R6_OPC_MODU:
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            break;
        default:
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_SELEQZ:
    case OPC_SELNEZ:
        gen_cond_move(ctx, op1, rd, rs, rt);
        break;
    case R6_OPC_CLO:
    case R6_OPC_CLZ:
        if (rt == 0 && sa == 1) {
            /* Major opcode and function field is shared with preR6 MFHI/MTHI.
               We need additionally to check other fields */
            gen_cl(ctx, op1, rd, rs);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
    case R6_OPC_SDBBP:
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
        } else {
            if (ctx->hflags & MIPS_HFLAG_SBRI) {
                generate_exception_end(ctx, EXCP_RI);
            } else {
                generate_exception_end(ctx, EXCP_DBp);
            }
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DLSA:
        check_mips_64(ctx);
        gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
        break;
    case R6_OPC_DCLO:
    case R6_OPC_DCLZ:
        if (rt == 0 && sa == 1) {
            /* Major opcode and function field is shared with preR6 MFHI/MTHI.
               We need additionally to check other fields */
            check_mips_64(ctx);
            gen_cl(ctx, op1, rd, rs);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
    case OPC_DMULT ... OPC_DDIVU:
        op2 = MASK_R6_MULDIV(ctx->opcode);
        switch (op2) {
        case R6_OPC_DMUL:
        case R6_OPC_DMUH:
        case R6_OPC_DMULU:
        case R6_OPC_DMUHU:
        case R6_OPC_DDIV:
        case R6_OPC_DMOD:
        case R6_OPC_DDIVU:
        case R6_OPC_DMODU:
            check_mips_64(ctx);
            gen_r6_muldiv(ctx, op2, rd, rs, rt);
            break;
        default:
            MIPS_INVAL("special_r6 muldiv");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special_r6");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_opc_special_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;

    op1 = MASK_SPECIAL(ctx->opcode);
    switch (op1) {
    case OPC_MOVN:         /* Conditional move */
    case OPC_MOVZ:
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32 |
                   INSN_LOONGSON2E | INSN_LOONGSON2F);
        gen_cond_move(ctx, op1, rd, rs, rt);
        break;
    case OPC_MFHI:          /* Move from HI/LO */
    case OPC_MFLO:
        gen_HILO(ctx, op1, rs & 3, rd);
        break;
    case OPC_MTHI:
    case OPC_MTLO:          /* Move to HI/LO */
        gen_HILO(ctx, op1, rd & 3, rs);
        break;
    case OPC_MOVCI:
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
                      (ctx->opcode >> 16) & 1);
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;
    case OPC_MULT:
    case OPC_MULTU:
        if (sa) {
            check_insn(ctx, INSN_VR54XX);
            op1 = MASK_MUL_VR54XX(ctx->opcode);
            gen_mul_vr54xx(ctx, op1, rd, rs, rt);
        } else {
            gen_muldiv(ctx, op1, rd & 3, rs, rt);
        }
        break;
    case OPC_DIV:
    case OPC_DIVU:
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DMULT ... OPC_DDIVU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, op1, 0, rs, rt);
        break;
#endif
    case OPC_JR:
        gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
        break;
    case OPC_SPIM:
#ifdef MIPS_STRICT_STANDARD
        MIPS_INVAL("SPIM");
        generate_exception_end(ctx, EXCP_RI);
#else
        /* Implemented as RI exception for now. */
        MIPS_INVAL("spim (unofficial)");
        generate_exception_end(ctx, EXCP_RI);
#endif
        break;
    default:            /* Invalid */
        MIPS_INVAL("special_legacy");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
17128 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
17130 int rs
, rt
, rd
, sa
;
17133 rs
= (ctx
->opcode
>> 21) & 0x1f;
17134 rt
= (ctx
->opcode
>> 16) & 0x1f;
17135 rd
= (ctx
->opcode
>> 11) & 0x1f;
17136 sa
= (ctx
->opcode
>> 6) & 0x1f;
17138 op1
= MASK_SPECIAL(ctx
->opcode
);
17140 case OPC_SLL
: /* Shift with immediate */
17141 if (sa
== 5 && rd
== 0 &&
17142 rs
== 0 && rt
== 0) { /* PAUSE */
17143 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
17144 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
17145 generate_exception_end(ctx
, EXCP_RI
);
17151 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17154 switch ((ctx
->opcode
>> 21) & 0x1f) {
17156 /* rotr is decoded as srl on non-R2 CPUs */
17157 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17162 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17165 generate_exception_end(ctx
, EXCP_RI
);
17169 case OPC_ADD
... OPC_SUBU
:
17170 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17172 case OPC_SLLV
: /* Shifts */
17174 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17177 switch ((ctx
->opcode
>> 6) & 0x1f) {
17179 /* rotrv is decoded as srlv on non-R2 CPUs */
17180 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17185 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17188 generate_exception_end(ctx
, EXCP_RI
);
17192 case OPC_SLT
: /* Set on less than */
17194 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17196 case OPC_AND
: /* Logic*/
17200 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17203 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17205 case OPC_TGE
... OPC_TEQ
: /* Traps */
17207 check_insn(ctx
, ISA_MIPS2
);
17208 gen_trap(ctx
, op1
, rs
, rt
, -1);
17210 case OPC_LSA
: /* OPC_PMON */
17211 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17212 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17213 decode_opc_special_r6(env
, ctx
);
17215 /* Pmon entry point, also R4010 selsl */
17216 #ifdef MIPS_STRICT_STANDARD
17217 MIPS_INVAL("PMON / selsl");
17218 generate_exception_end(ctx
, EXCP_RI
);
17220 gen_helper_0e0i(pmon
, sa
);
17225 generate_exception_end(ctx
, EXCP_SYSCALL
);
17228 generate_exception_end(ctx
, EXCP_BREAK
);
17231 check_insn(ctx
, ISA_MIPS2
);
17232 gen_sync(extract32(ctx
->opcode
, 6, 5));
17235 #if defined(TARGET_MIPS64)
17236 /* MIPS64 specific opcodes */
17241 check_insn(ctx
, ISA_MIPS3
);
17242 check_mips_64(ctx
);
17243 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17246 switch ((ctx
->opcode
>> 21) & 0x1f) {
17248 /* drotr is decoded as dsrl on non-R2 CPUs */
17249 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17254 check_insn(ctx
, ISA_MIPS3
);
17255 check_mips_64(ctx
);
17256 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17259 generate_exception_end(ctx
, EXCP_RI
);
17264 switch ((ctx
->opcode
>> 21) & 0x1f) {
17266 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17267 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17272 check_insn(ctx
, ISA_MIPS3
);
17273 check_mips_64(ctx
);
17274 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17277 generate_exception_end(ctx
, EXCP_RI
);
17281 case OPC_DADD
... OPC_DSUBU
:
17282 check_insn(ctx
, ISA_MIPS3
);
17283 check_mips_64(ctx
);
17284 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17288 check_insn(ctx
, ISA_MIPS3
);
17289 check_mips_64(ctx
);
17290 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17293 switch ((ctx
->opcode
>> 6) & 0x1f) {
17295 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17296 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17301 check_insn(ctx
, ISA_MIPS3
);
17302 check_mips_64(ctx
);
17303 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17306 generate_exception_end(ctx
, EXCP_RI
);
17311 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17312 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17313 decode_opc_special_r6(env
, ctx
);
17318 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17319 decode_opc_special_r6(env
, ctx
);
17321 decode_opc_special_legacy(env
, ctx
);
static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd;
    uint32_t op1;

    check_insn_opc_removed(ctx, ISA_MIPS32R6);

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL2(ctx->opcode);
    switch (op1) {
    case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
    case OPC_MSUB ... OPC_MSUBU:
        check_insn(ctx, ISA_MIPS32);
        gen_muldiv(ctx, op1, rd & 3, rs, rt);
        break;
    case OPC_MUL:
        gen_arith(ctx, op1, rd, rs, rt);
        break;
    case OPC_DIV_G_2F:
    case OPC_DIVU_G_2F:
    case OPC_MULT_G_2F:
    case OPC_MULTU_G_2F:
    case OPC_MOD_G_2F:
    case OPC_MODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
    case OPC_CLO:
    case OPC_CLZ:
        check_insn(ctx, ISA_MIPS32);
        gen_cl(ctx, op1, rd, rs);
        break;
    case OPC_SDBBP:
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            gen_helper_do_semihosting(cpu_env);
        } else {
            /* XXX: not clear which exception should be raised
             *      when in debug mode...
             */
            check_insn(ctx, ISA_MIPS32);
            generate_exception_end(ctx, EXCP_DBp);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DCLO:
    case OPC_DCLZ:
        check_insn(ctx, ISA_MIPS64);
        check_mips_64(ctx);
        gen_cl(ctx, op1, rd, rs);
        break;
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2F:
    case OPC_DDIV_G_2F:
    case OPC_DDIVU_G_2F:
    case OPC_DMOD_G_2F:
    case OPC_DMODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special2_legacy");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
static void decode_opc_special3_r6(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd, sa;
    uint32_t op1, op2;
    int16_t imm;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode >> 7;

    op1 = MASK_SPECIAL3(ctx->opcode);
    switch (op1) {
    case R6_OPC_PREF:
        if (rt >= 24) {
            /* hint codes 24-31 are reserved and signal RI */
            generate_exception_end(ctx, EXCP_RI);
        }
        /* Treat as NOP. */
        break;
    case R6_OPC_CACHE:
        check_cp0_enabled(ctx);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        }
        break;
    case R6_OPC_SC:
        gen_st_cond(ctx, op1, rt, rs, imm);
        break;
    case R6_OPC_LL:
        gen_ld(ctx, op1, rt, rs, imm);
        break;
    case OPC_BSHFL:
        {
            if (rd == 0) {
                /* Treat as NOP. */
                break;
            }
            op2 = MASK_BSHFL(ctx->opcode);
            switch (op2) {
            case OPC_ALIGN ... OPC_ALIGN_END:
                gen_align(ctx, OPC_ALIGN, rd, rs, rt, sa & 3);
                break;
            case OPC_BITSWAP:
                gen_bitswap(ctx, op2, rd, rt);
                break;
            }
            break;
        }
#if defined(TARGET_MIPS64)
    case R6_OPC_SCD:
        gen_st_cond(ctx, op1, rt, rs, imm);
        break;
    case R6_OPC_LLD:
        gen_ld(ctx, op1, rt, rs, imm);
        break;
    case OPC_DBSHFL:
        check_mips_64(ctx);
        {
            if (rd == 0) {
                /* Treat as NOP. */
                break;
            }
            op2 = MASK_DBSHFL(ctx->opcode);
            switch (op2) {
            case OPC_DALIGN ... OPC_DALIGN_END:
                gen_align(ctx, OPC_DALIGN, rd, rs, rt, sa & 7);
                break;
            case OPC_DBITSWAP:
                gen_bitswap(ctx, op2, rd, rt);
                break;
            }
            break;
        }
#endif
    default:            /* Invalid */
        MIPS_INVAL("special3_r6");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
17480 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17485 rs
= (ctx
->opcode
>> 21) & 0x1f;
17486 rt
= (ctx
->opcode
>> 16) & 0x1f;
17487 rd
= (ctx
->opcode
>> 11) & 0x1f;
17489 op1
= MASK_SPECIAL3(ctx
->opcode
);
17491 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17492 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17493 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17494 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17495 * the same mask and op1. */
17496 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17497 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17500 case OPC_ADDUH_R_QB
:
17502 case OPC_ADDQH_R_PH
:
17504 case OPC_ADDQH_R_W
:
17506 case OPC_SUBUH_R_QB
:
17508 case OPC_SUBQH_R_PH
:
17510 case OPC_SUBQH_R_W
:
17511 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17516 case OPC_MULQ_RS_W
:
17517 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17520 MIPS_INVAL("MASK ADDUH.QB");
17521 generate_exception_end(ctx
, EXCP_RI
);
17524 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17525 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17527 generate_exception_end(ctx
, EXCP_RI
);
17531 op2
= MASK_LX(ctx
->opcode
);
17533 #if defined(TARGET_MIPS64)
17539 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17541 default: /* Invalid */
17542 MIPS_INVAL("MASK LX");
17543 generate_exception_end(ctx
, EXCP_RI
);
17547 case OPC_ABSQ_S_PH_DSP
:
17548 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17550 case OPC_ABSQ_S_QB
:
17551 case OPC_ABSQ_S_PH
:
17553 case OPC_PRECEQ_W_PHL
:
17554 case OPC_PRECEQ_W_PHR
:
17555 case OPC_PRECEQU_PH_QBL
:
17556 case OPC_PRECEQU_PH_QBR
:
17557 case OPC_PRECEQU_PH_QBLA
:
17558 case OPC_PRECEQU_PH_QBRA
:
17559 case OPC_PRECEU_PH_QBL
:
17560 case OPC_PRECEU_PH_QBR
:
17561 case OPC_PRECEU_PH_QBLA
:
17562 case OPC_PRECEU_PH_QBRA
:
17563 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17570 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17573 MIPS_INVAL("MASK ABSQ_S.PH");
17574 generate_exception_end(ctx
, EXCP_RI
);
17578 case OPC_ADDU_QB_DSP
:
17579 op2
= MASK_ADDU_QB(ctx
->opcode
);
17582 case OPC_ADDQ_S_PH
:
17585 case OPC_ADDU_S_QB
:
17587 case OPC_ADDU_S_PH
:
17589 case OPC_SUBQ_S_PH
:
17592 case OPC_SUBU_S_QB
:
17594 case OPC_SUBU_S_PH
:
17598 case OPC_RADDU_W_QB
:
17599 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17601 case OPC_MULEU_S_PH_QBL
:
17602 case OPC_MULEU_S_PH_QBR
:
17603 case OPC_MULQ_RS_PH
:
17604 case OPC_MULEQ_S_W_PHL
:
17605 case OPC_MULEQ_S_W_PHR
:
17606 case OPC_MULQ_S_PH
:
17607 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17609 default: /* Invalid */
17610 MIPS_INVAL("MASK ADDU.QB");
17611 generate_exception_end(ctx
, EXCP_RI
);
17616 case OPC_CMPU_EQ_QB_DSP
:
17617 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17619 case OPC_PRECR_SRA_PH_W
:
17620 case OPC_PRECR_SRA_R_PH_W
:
17621 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17623 case OPC_PRECR_QB_PH
:
17624 case OPC_PRECRQ_QB_PH
:
17625 case OPC_PRECRQ_PH_W
:
17626 case OPC_PRECRQ_RS_PH_W
:
17627 case OPC_PRECRQU_S_QB_PH
:
17628 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17630 case OPC_CMPU_EQ_QB
:
17631 case OPC_CMPU_LT_QB
:
17632 case OPC_CMPU_LE_QB
:
17633 case OPC_CMP_EQ_PH
:
17634 case OPC_CMP_LT_PH
:
17635 case OPC_CMP_LE_PH
:
17636 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17638 case OPC_CMPGU_EQ_QB
:
17639 case OPC_CMPGU_LT_QB
:
17640 case OPC_CMPGU_LE_QB
:
17641 case OPC_CMPGDU_EQ_QB
:
17642 case OPC_CMPGDU_LT_QB
:
17643 case OPC_CMPGDU_LE_QB
:
17646 case OPC_PACKRL_PH
:
17647 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17649 default: /* Invalid */
17650 MIPS_INVAL("MASK CMPU.EQ.QB");
17651 generate_exception_end(ctx
, EXCP_RI
);
17655 case OPC_SHLL_QB_DSP
:
17656 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17658 case OPC_DPA_W_PH_DSP
:
17659 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17661 case OPC_DPAU_H_QBL
:
17662 case OPC_DPAU_H_QBR
:
17663 case OPC_DPSU_H_QBL
:
17664 case OPC_DPSU_H_QBR
:
17666 case OPC_DPAX_W_PH
:
17667 case OPC_DPAQ_S_W_PH
:
17668 case OPC_DPAQX_S_W_PH
:
17669 case OPC_DPAQX_SA_W_PH
:
17671 case OPC_DPSX_W_PH
:
17672 case OPC_DPSQ_S_W_PH
:
17673 case OPC_DPSQX_S_W_PH
:
17674 case OPC_DPSQX_SA_W_PH
:
17675 case OPC_MULSAQ_S_W_PH
:
17676 case OPC_DPAQ_SA_L_W
:
17677 case OPC_DPSQ_SA_L_W
:
17678 case OPC_MAQ_S_W_PHL
:
17679 case OPC_MAQ_S_W_PHR
:
17680 case OPC_MAQ_SA_W_PHL
:
17681 case OPC_MAQ_SA_W_PHR
:
17682 case OPC_MULSA_W_PH
:
17683 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17685 default: /* Invalid */
17686 MIPS_INVAL("MASK DPAW.PH");
17687 generate_exception_end(ctx
, EXCP_RI
);
17692 op2
= MASK_INSV(ctx
->opcode
);
17703 t0
= tcg_temp_new();
17704 t1
= tcg_temp_new();
17706 gen_load_gpr(t0
, rt
);
17707 gen_load_gpr(t1
, rs
);
17709 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17715 default: /* Invalid */
17716 MIPS_INVAL("MASK INSV");
17717 generate_exception_end(ctx
, EXCP_RI
);
17721 case OPC_APPEND_DSP
:
17722 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17724 case OPC_EXTR_W_DSP
:
17725 op2
= MASK_EXTR_W(ctx
->opcode
);
17729 case OPC_EXTR_RS_W
:
17731 case OPC_EXTRV_S_H
:
17733 case OPC_EXTRV_R_W
:
17734 case OPC_EXTRV_RS_W
:
17739 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17742 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17748 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17750 default: /* Invalid */
17751 MIPS_INVAL("MASK EXTR.W");
17752 generate_exception_end(ctx
, EXCP_RI
);
17756 #if defined(TARGET_MIPS64)
17757 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17758 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17759 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17760 check_insn(ctx
, INSN_LOONGSON2E
);
17761 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17763 case OPC_ABSQ_S_QH_DSP
:
17764 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17766 case OPC_PRECEQ_L_PWL
:
17767 case OPC_PRECEQ_L_PWR
:
17768 case OPC_PRECEQ_PW_QHL
:
17769 case OPC_PRECEQ_PW_QHR
:
17770 case OPC_PRECEQ_PW_QHLA
:
17771 case OPC_PRECEQ_PW_QHRA
:
17772 case OPC_PRECEQU_QH_OBL
:
17773 case OPC_PRECEQU_QH_OBR
:
17774 case OPC_PRECEQU_QH_OBLA
:
17775 case OPC_PRECEQU_QH_OBRA
:
17776 case OPC_PRECEU_QH_OBL
:
17777 case OPC_PRECEU_QH_OBR
:
17778 case OPC_PRECEU_QH_OBLA
:
17779 case OPC_PRECEU_QH_OBRA
:
17780 case OPC_ABSQ_S_OB
:
17781 case OPC_ABSQ_S_PW
:
17782 case OPC_ABSQ_S_QH
:
17783 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17791 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17793 default: /* Invalid */
17794 MIPS_INVAL("MASK ABSQ_S.QH");
17795 generate_exception_end(ctx
, EXCP_RI
);
17799 case OPC_ADDU_OB_DSP
:
17800 op2
= MASK_ADDU_OB(ctx
->opcode
);
17802 case OPC_RADDU_L_OB
:
17804 case OPC_SUBQ_S_PW
:
17806 case OPC_SUBQ_S_QH
:
17808 case OPC_SUBU_S_OB
:
17810 case OPC_SUBU_S_QH
:
17812 case OPC_SUBUH_R_OB
:
17814 case OPC_ADDQ_S_PW
:
17816 case OPC_ADDQ_S_QH
:
17818 case OPC_ADDU_S_OB
:
17820 case OPC_ADDU_S_QH
:
17822 case OPC_ADDUH_R_OB
:
17823 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17825 case OPC_MULEQ_S_PW_QHL
:
17826 case OPC_MULEQ_S_PW_QHR
:
17827 case OPC_MULEU_S_QH_OBL
:
17828 case OPC_MULEU_S_QH_OBR
:
17829 case OPC_MULQ_RS_QH
:
17830 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17832 default: /* Invalid */
17833 MIPS_INVAL("MASK ADDU.OB");
17834 generate_exception_end(ctx
, EXCP_RI
);
17838 case OPC_CMPU_EQ_OB_DSP
:
17839 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17841 case OPC_PRECR_SRA_QH_PW
:
17842 case OPC_PRECR_SRA_R_QH_PW
:
17843 /* Return value is rt. */
17844 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17846 case OPC_PRECR_OB_QH
:
17847 case OPC_PRECRQ_OB_QH
:
17848 case OPC_PRECRQ_PW_L
:
17849 case OPC_PRECRQ_QH_PW
:
17850 case OPC_PRECRQ_RS_QH_PW
:
17851 case OPC_PRECRQU_S_OB_QH
:
17852 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17854 case OPC_CMPU_EQ_OB
:
17855 case OPC_CMPU_LT_OB
:
17856 case OPC_CMPU_LE_OB
:
17857 case OPC_CMP_EQ_QH
:
17858 case OPC_CMP_LT_QH
:
17859 case OPC_CMP_LE_QH
:
17860 case OPC_CMP_EQ_PW
:
17861 case OPC_CMP_LT_PW
:
17862 case OPC_CMP_LE_PW
:
17863 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17865 case OPC_CMPGDU_EQ_OB
:
17866 case OPC_CMPGDU_LT_OB
:
17867 case OPC_CMPGDU_LE_OB
:
17868 case OPC_CMPGU_EQ_OB
:
17869 case OPC_CMPGU_LT_OB
:
17870 case OPC_CMPGU_LE_OB
:
17871 case OPC_PACKRL_PW
:
17875 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17877 default: /* Invalid */
17878 MIPS_INVAL("MASK CMPU_EQ.OB");
17879 generate_exception_end(ctx
, EXCP_RI
);
17883 case OPC_DAPPEND_DSP
:
17884 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17886 case OPC_DEXTR_W_DSP
:
17887 op2
= MASK_DEXTR_W(ctx
->opcode
);
17894 case OPC_DEXTR_R_L
:
17895 case OPC_DEXTR_RS_L
:
17897 case OPC_DEXTR_R_W
:
17898 case OPC_DEXTR_RS_W
:
17899 case OPC_DEXTR_S_H
:
17901 case OPC_DEXTRV_R_L
:
17902 case OPC_DEXTRV_RS_L
:
17903 case OPC_DEXTRV_S_H
:
17905 case OPC_DEXTRV_R_W
:
17906 case OPC_DEXTRV_RS_W
:
17907 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17912 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17914 default: /* Invalid */
17915 MIPS_INVAL("MASK EXTR.W");
17916 generate_exception_end(ctx
, EXCP_RI
);
17920 case OPC_DPAQ_W_QH_DSP
:
17921 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17923 case OPC_DPAU_H_OBL
:
17924 case OPC_DPAU_H_OBR
:
17925 case OPC_DPSU_H_OBL
:
17926 case OPC_DPSU_H_OBR
:
17928 case OPC_DPAQ_S_W_QH
:
17930 case OPC_DPSQ_S_W_QH
:
17931 case OPC_MULSAQ_S_W_QH
:
17932 case OPC_DPAQ_SA_L_PW
:
17933 case OPC_DPSQ_SA_L_PW
:
17934 case OPC_MULSAQ_S_L_PW
:
17935 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17937 case OPC_MAQ_S_W_QHLL
:
17938 case OPC_MAQ_S_W_QHLR
:
17939 case OPC_MAQ_S_W_QHRL
:
17940 case OPC_MAQ_S_W_QHRR
:
17941 case OPC_MAQ_SA_W_QHLL
:
17942 case OPC_MAQ_SA_W_QHLR
:
17943 case OPC_MAQ_SA_W_QHRL
:
17944 case OPC_MAQ_SA_W_QHRR
:
17945 case OPC_MAQ_S_L_PWL
:
17946 case OPC_MAQ_S_L_PWR
:
17951 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17953 default: /* Invalid */
17954 MIPS_INVAL("MASK DPAQ.W.QH");
17955 generate_exception_end(ctx
, EXCP_RI
);
17959 case OPC_DINSV_DSP
:
17960 op2
= MASK_INSV(ctx
->opcode
);
17971 t0
= tcg_temp_new();
17972 t1
= tcg_temp_new();
17974 gen_load_gpr(t0
, rt
);
17975 gen_load_gpr(t1
, rs
);
17977 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17983 default: /* Invalid */
17984 MIPS_INVAL("MASK DINSV");
17985 generate_exception_end(ctx
, EXCP_RI
);
17989 case OPC_SHLL_OB_DSP
:
17990 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17993 default: /* Invalid */
17994 MIPS_INVAL("special3_legacy");
17995 generate_exception_end(ctx
, EXCP_RI
);
18000 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
18002 int rs
, rt
, rd
, sa
;
18005 rs
= (ctx
->opcode
>> 21) & 0x1f;
18006 rt
= (ctx
->opcode
>> 16) & 0x1f;
18007 rd
= (ctx
->opcode
>> 11) & 0x1f;
18008 sa
= (ctx
->opcode
>> 6) & 0x1f;
18010 op1
= MASK_SPECIAL3(ctx
->opcode
);
18014 check_insn(ctx
, ISA_MIPS32R2
);
18015 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18018 op2
= MASK_BSHFL(ctx
->opcode
);
18020 case OPC_ALIGN
... OPC_ALIGN_END
:
18022 check_insn(ctx
, ISA_MIPS32R6
);
18023 decode_opc_special3_r6(env
, ctx
);
18026 check_insn(ctx
, ISA_MIPS32R2
);
18027 gen_bshfl(ctx
, op2
, rt
, rd
);
18031 #if defined(TARGET_MIPS64)
18032 case OPC_DEXTM
... OPC_DEXT
:
18033 case OPC_DINSM
... OPC_DINS
:
18034 check_insn(ctx
, ISA_MIPS64R2
);
18035 check_mips_64(ctx
);
18036 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18039 op2
= MASK_DBSHFL(ctx
->opcode
);
18041 case OPC_DALIGN
... OPC_DALIGN_END
:
18043 check_insn(ctx
, ISA_MIPS32R6
);
18044 decode_opc_special3_r6(env
, ctx
);
18047 check_insn(ctx
, ISA_MIPS64R2
);
18048 check_mips_64(ctx
);
18049 op2
= MASK_DBSHFL(ctx
->opcode
);
18050 gen_bshfl(ctx
, op2
, rt
, rd
);
18056 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
18059 check_insn(ctx
, ASE_MT
);
18061 TCGv t0
= tcg_temp_new();
18062 TCGv t1
= tcg_temp_new();
18064 gen_load_gpr(t0
, rt
);
18065 gen_load_gpr(t1
, rs
);
18066 gen_helper_fork(t0
, t1
);
18072 check_insn(ctx
, ASE_MT
);
18074 TCGv t0
= tcg_temp_new();
18076 gen_load_gpr(t0
, rs
);
18077 gen_helper_yield(t0
, cpu_env
, t0
);
18078 gen_store_gpr(t0
, rd
);
18083 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18084 decode_opc_special3_r6(env
, ctx
);
18086 decode_opc_special3_legacy(env
, ctx
);
/* MIPS SIMD Architecture (MSA) */
static inline int check_msa_access(DisasContext *ctx)
{
    if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
                 !(ctx->hflags & MIPS_HFLAG_F64))) {
        generate_exception_end(ctx, EXCP_RI);
        return 0;
    }

    if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
        if (ctx->insn_flags & ASE_MSA) {
            generate_exception_end(ctx, EXCP_MSADIS);
            return 0;
        } else {
            generate_exception_end(ctx, EXCP_RI);
            return 0;
        }
    }
    return 1;
}
static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
{
    /* generates tcg ops to check if any element is 0 */
    /* Note this function only works with MSA_WRLEN = 128 */
    uint64_t eval_zero_or_big = 0;
    uint64_t eval_big = 0;
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    switch (df) {
    case DF_BYTE:
        eval_zero_or_big = 0x0101010101010101ULL;
        eval_big = 0x8080808080808080ULL;
        break;
    case DF_HALF:
        eval_zero_or_big = 0x0001000100010001ULL;
        eval_big = 0x8000800080008000ULL;
        break;
    case DF_WORD:
        eval_zero_or_big = 0x0000000100000001ULL;
        eval_big = 0x8000000080000000ULL;
        break;
    case DF_DOUBLE:
        eval_zero_or_big = 0x0000000000000001ULL;
        eval_big = 0x8000000000000000ULL;
        break;
    }
    tcg_gen_subi_i64(t0, msa_wr_d[wt<<1], eval_zero_or_big);
    tcg_gen_andc_i64(t0, t0, msa_wr_d[wt<<1]);
    tcg_gen_andi_i64(t0, t0, eval_big);
    tcg_gen_subi_i64(t1, msa_wr_d[(wt<<1)+1], eval_zero_or_big);
    tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt<<1)+1]);
    tcg_gen_andi_i64(t1, t1, eval_big);
    tcg_gen_or_i64(t0, t0, t1);
    /* if all bits are zero then all elements are not zero */
    /* if some bit is non-zero then some element is zero */
    tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
    tcg_gen_trunc_i64_tl(tresult, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
{
    uint8_t df = (ctx->opcode >> 21) & 0x3;
    uint8_t wt = (ctx->opcode >> 16) & 0x1f;
    int64_t s16 = (int16_t)ctx->opcode;

    check_msa_access(ctx);

    if (ctx->insn_flags & ISA_MIPS32R6 && ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }
    switch (op1) {
    case OPC_BZ_V:
    case OPC_BNZ_V:
        {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_or_i64(t0, msa_wr_d[wt<<1], msa_wr_d[(wt<<1)+1]);
            tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
                    TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
            tcg_gen_trunc_i64_tl(bcond, t0);
            tcg_temp_free_i64(t0);
        }
        break;
    case OPC_BZ_B:
    case OPC_BZ_H:
    case OPC_BZ_W:
    case OPC_BZ_D:
        gen_check_zero_element(bcond, df, wt);
        break;
    case OPC_BNZ_B:
    case OPC_BNZ_H:
    case OPC_BNZ_W:
    case OPC_BNZ_D:
        gen_check_zero_element(bcond, df, wt);
        tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
        break;
    }

    ctx->btarget = ctx->pc + (s16 << 2) + 4;

    ctx->hflags |= MIPS_HFLAG_BC;
    ctx->hflags |= MIPS_HFLAG_BDS32;
}
18198 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
18200 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
18201 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
18202 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18203 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18205 TCGv_i32 twd
= tcg_const_i32(wd
);
18206 TCGv_i32 tws
= tcg_const_i32(ws
);
18207 TCGv_i32 ti8
= tcg_const_i32(i8
);
18209 switch (MASK_MSA_I8(ctx
->opcode
)) {
18211 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
18214 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
18217 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
18220 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
18223 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18226 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18229 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18235 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18236 if (df
== DF_DOUBLE
) {
18237 generate_exception_end(ctx
, EXCP_RI
);
18239 TCGv_i32 tdf
= tcg_const_i32(df
);
18240 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18241 tcg_temp_free_i32(tdf
);
18246 MIPS_INVAL("MSA instruction");
18247 generate_exception_end(ctx
, EXCP_RI
);
18251 tcg_temp_free_i32(twd
);
18252 tcg_temp_free_i32(tws
);
18253 tcg_temp_free_i32(ti8
);
18256 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18258 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18259 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18260 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18261 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18262 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18263 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18265 TCGv_i32 tdf
= tcg_const_i32(df
);
18266 TCGv_i32 twd
= tcg_const_i32(wd
);
18267 TCGv_i32 tws
= tcg_const_i32(ws
);
18268 TCGv_i32 timm
= tcg_temp_new_i32();
18269 tcg_gen_movi_i32(timm
, u5
);
18271 switch (MASK_MSA_I5(ctx
->opcode
)) {
18273 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18276 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18278 case OPC_MAXI_S_df
:
18279 tcg_gen_movi_i32(timm
, s5
);
18280 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18282 case OPC_MAXI_U_df
:
18283 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18285 case OPC_MINI_S_df
:
18286 tcg_gen_movi_i32(timm
, s5
);
18287 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18289 case OPC_MINI_U_df
:
18290 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18293 tcg_gen_movi_i32(timm
, s5
);
18294 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18296 case OPC_CLTI_S_df
:
18297 tcg_gen_movi_i32(timm
, s5
);
18298 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18300 case OPC_CLTI_U_df
:
18301 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18303 case OPC_CLEI_S_df
:
18304 tcg_gen_movi_i32(timm
, s5
);
18305 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18307 case OPC_CLEI_U_df
:
18308 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18312 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18313 tcg_gen_movi_i32(timm
, s10
);
18314 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18318 MIPS_INVAL("MSA instruction");
18319 generate_exception_end(ctx
, EXCP_RI
);
18323 tcg_temp_free_i32(tdf
);
18324 tcg_temp_free_i32(twd
);
18325 tcg_temp_free_i32(tws
);
18326 tcg_temp_free_i32(timm
);
18329 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18331 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18332 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18333 uint32_t df
= 0, m
= 0;
18334 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18335 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18342 if ((dfm
& 0x40) == 0x00) {
18345 } else if ((dfm
& 0x60) == 0x40) {
18348 } else if ((dfm
& 0x70) == 0x60) {
18351 } else if ((dfm
& 0x78) == 0x70) {
18355 generate_exception_end(ctx
, EXCP_RI
);
18359 tdf
= tcg_const_i32(df
);
18360 tm
= tcg_const_i32(m
);
18361 twd
= tcg_const_i32(wd
);
18362 tws
= tcg_const_i32(ws
);
18364 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18366 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18369 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18372 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18375 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18378 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18381 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18383 case OPC_BINSLI_df
:
18384 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18386 case OPC_BINSRI_df
:
18387 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18390 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18393 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18396 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18399 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18402 MIPS_INVAL("MSA instruction");
18403 generate_exception_end(ctx
, EXCP_RI
);
18407 tcg_temp_free_i32(tdf
);
18408 tcg_temp_free_i32(tm
);
18409 tcg_temp_free_i32(twd
);
18410 tcg_temp_free_i32(tws
);
18413 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18415 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18416 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18417 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18418 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18419 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18421 TCGv_i32 tdf
= tcg_const_i32(df
);
18422 TCGv_i32 twd
= tcg_const_i32(wd
);
18423 TCGv_i32 tws
= tcg_const_i32(ws
);
18424 TCGv_i32 twt
= tcg_const_i32(wt
);
18426 switch (MASK_MSA_3R(ctx
->opcode
)) {
18428 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18431 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18434 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18437 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18439 case OPC_SUBS_S_df
:
18440 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18443 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18446 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18449 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18452 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18455 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18457 case OPC_ADDS_A_df
:
18458 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18460 case OPC_SUBS_U_df
:
18461 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18464 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18467 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18470 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18473 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18476 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18479 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18481 case OPC_ADDS_S_df
:
18482 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18484 case OPC_SUBSUS_U_df
:
18485 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18488 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18491 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18494 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18497 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18500 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18503 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18505 case OPC_ADDS_U_df
:
18506 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18508 case OPC_SUBSUU_S_df
:
18509 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18512 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18515 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18518 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18521 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18524 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18526 case OPC_ASUB_S_df
:
18527 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18530 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18533 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18536 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18539 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18542 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18545 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18547 case OPC_ASUB_U_df
:
18548 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18551 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18554 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18557 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18560 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18562 case OPC_AVER_S_df
:
18563 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18566 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18569 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18572 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18575 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18577 case OPC_AVER_U_df
:
18578 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18581 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18584 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18587 case OPC_DOTP_S_df
:
18588 case OPC_DOTP_U_df
:
18589 case OPC_DPADD_S_df
:
18590 case OPC_DPADD_U_df
:
18591 case OPC_DPSUB_S_df
:
18592 case OPC_HADD_S_df
:
18593 case OPC_DPSUB_U_df
:
18594 case OPC_HADD_U_df
:
18595 case OPC_HSUB_S_df
:
18596 case OPC_HSUB_U_df
:
18597 if (df
== DF_BYTE
) {
18598 generate_exception_end(ctx
, EXCP_RI
);
18601 switch (MASK_MSA_3R(ctx
->opcode
)) {
18602 case OPC_DOTP_S_df
:
18603 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18605 case OPC_DOTP_U_df
:
18606 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18608 case OPC_DPADD_S_df
:
18609 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18611 case OPC_DPADD_U_df
:
18612 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18614 case OPC_DPSUB_S_df
:
18615 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18617 case OPC_HADD_S_df
:
18618 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18620 case OPC_DPSUB_U_df
:
18621 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18623 case OPC_HADD_U_df
:
18624 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18626 case OPC_HSUB_S_df
:
18627 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18629 case OPC_HSUB_U_df
:
18630 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18635 MIPS_INVAL("MSA instruction");
18636 generate_exception_end(ctx
, EXCP_RI
);
18639 tcg_temp_free_i32(twd
);
18640 tcg_temp_free_i32(tws
);
18641 tcg_temp_free_i32(twt
);
18642 tcg_temp_free_i32(tdf
);
18645 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18647 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
18648 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18649 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18650 TCGv telm
= tcg_temp_new();
18651 TCGv_i32 tsr
= tcg_const_i32(source
);
18652 TCGv_i32 tdt
= tcg_const_i32(dest
);
18654 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
18656 gen_load_gpr(telm
, source
);
18657 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
18660 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18661 gen_store_gpr(telm
, dest
);
18664 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
18667 MIPS_INVAL("MSA instruction");
18668 generate_exception_end(ctx
, EXCP_RI
);
18672 tcg_temp_free(telm
);
18673 tcg_temp_free_i32(tdt
);
18674 tcg_temp_free_i32(tsr
);
18677 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18680 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18681 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18682 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18684 TCGv_i32 tws
= tcg_const_i32(ws
);
18685 TCGv_i32 twd
= tcg_const_i32(wd
);
18686 TCGv_i32 tn
= tcg_const_i32(n
);
18687 TCGv_i32 tdf
= tcg_const_i32(df
);
18689 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18691 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18693 case OPC_SPLATI_df
:
18694 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18697 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18699 case OPC_COPY_S_df
:
18700 case OPC_COPY_U_df
:
18701 case OPC_INSERT_df
:
18702 #if !defined(TARGET_MIPS64)
18703 /* Double format valid only for MIPS64 */
18704 if (df
== DF_DOUBLE
) {
18705 generate_exception_end(ctx
, EXCP_RI
);
18709 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18710 case OPC_COPY_S_df
:
18711 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18713 case OPC_COPY_U_df
:
18714 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18716 case OPC_INSERT_df
:
18717 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18722 MIPS_INVAL("MSA instruction");
18723 generate_exception_end(ctx
, EXCP_RI
);
18725 tcg_temp_free_i32(twd
);
18726 tcg_temp_free_i32(tws
);
18727 tcg_temp_free_i32(tn
);
18728 tcg_temp_free_i32(tdf
);
static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
{
    uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
    uint32_t df = 0, n = 0;

    if ((dfn & 0x30) == 0x00) {
        n = dfn & 0x0f;
        df = DF_BYTE;
    } else if ((dfn & 0x38) == 0x20) {
        n = (dfn >> 1) & 0x07;
        df = DF_HALF;
    } else if ((dfn & 0x3c) == 0x30) {
        n = (dfn >> 2) & 0x03;
        df = DF_WORD;
    } else if ((dfn & 0x3e) == 0x38) {
        n = (dfn >> 3) & 0x01;
        df = DF_DOUBLE;
    } else if (dfn == 0x3E) {
        /* CTCMSA, CFCMSA, MOVE.V */
        gen_msa_elm_3e(env, ctx);
        return;
    } else {
        generate_exception_end(ctx, EXCP_RI);
        return;
    }

    gen_msa_elm_df(env, ctx, df, n);
}
18760 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18762 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18763 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18764 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18765 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18766 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18768 TCGv_i32 twd
= tcg_const_i32(wd
);
18769 TCGv_i32 tws
= tcg_const_i32(ws
);
18770 TCGv_i32 twt
= tcg_const_i32(wt
);
18771 TCGv_i32 tdf
= tcg_temp_new_i32();
18773 /* adjust df value for floating-point instruction */
18774 tcg_gen_movi_i32(tdf
, df
+ 2);
18776 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18778 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18781 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18784 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18787 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18790 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18793 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18796 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18799 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18802 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18805 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18808 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18811 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18814 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18817 tcg_gen_movi_i32(tdf
, df
+ 1);
18818 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18821 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18824 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18826 case OPC_MADD_Q_df
:
18827 tcg_gen_movi_i32(tdf
, df
+ 1);
18828 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18831 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18833 case OPC_MSUB_Q_df
:
18834 tcg_gen_movi_i32(tdf
, df
+ 1);
18835 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18838 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18841 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18844 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18847 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18850 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18853 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18856 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18859 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18862 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18865 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18868 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18871 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18874 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18876 case OPC_MULR_Q_df
:
18877 tcg_gen_movi_i32(tdf
, df
+ 1);
18878 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18881 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18883 case OPC_FMIN_A_df
:
18884 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18886 case OPC_MADDR_Q_df
:
18887 tcg_gen_movi_i32(tdf
, df
+ 1);
18888 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18891 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18894 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18896 case OPC_MSUBR_Q_df
:
18897 tcg_gen_movi_i32(tdf
, df
+ 1);
18898 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18901 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18903 case OPC_FMAX_A_df
:
18904 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18907 MIPS_INVAL("MSA instruction");
18908 generate_exception_end(ctx
, EXCP_RI
);
18912 tcg_temp_free_i32(twd
);
18913 tcg_temp_free_i32(tws
);
18914 tcg_temp_free_i32(twt
);
18915 tcg_temp_free_i32(tdf
);
18918 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18920 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18921 (op & (0x7 << 18)))
18922 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18923 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18924 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18925 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18926 TCGv_i32 twd
= tcg_const_i32(wd
);
18927 TCGv_i32 tws
= tcg_const_i32(ws
);
18928 TCGv_i32 twt
= tcg_const_i32(wt
);
18929 TCGv_i32 tdf
= tcg_const_i32(df
);
18931 switch (MASK_MSA_2R(ctx
->opcode
)) {
18933 #if !defined(TARGET_MIPS64)
18934 /* Double format valid only for MIPS64 */
18935 if (df
== DF_DOUBLE
) {
18936 generate_exception_end(ctx
, EXCP_RI
);
18940 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18943 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18946 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18949 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18952 MIPS_INVAL("MSA instruction");
18953 generate_exception_end(ctx
, EXCP_RI
);
18957 tcg_temp_free_i32(twd
);
18958 tcg_temp_free_i32(tws
);
18959 tcg_temp_free_i32(twt
);
18960 tcg_temp_free_i32(tdf
);
18963 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18965 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18966 (op & (0xf << 17)))
18967 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18968 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18969 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18970 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18971 TCGv_i32 twd
= tcg_const_i32(wd
);
18972 TCGv_i32 tws
= tcg_const_i32(ws
);
18973 TCGv_i32 twt
= tcg_const_i32(wt
);
18974 /* adjust df value for floating-point instruction */
18975 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18977 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18978 case OPC_FCLASS_df
:
18979 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18981 case OPC_FTRUNC_S_df
:
18982 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18984 case OPC_FTRUNC_U_df
:
18985 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18988 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18990 case OPC_FRSQRT_df
:
18991 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18994 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
18997 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
19000 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
19002 case OPC_FEXUPL_df
:
19003 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
19005 case OPC_FEXUPR_df
:
19006 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
19009 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
19012 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
19014 case OPC_FTINT_S_df
:
19015 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
19017 case OPC_FTINT_U_df
:
19018 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
19020 case OPC_FFINT_S_df
:
19021 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
19023 case OPC_FFINT_U_df
:
19024 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
19028 tcg_temp_free_i32(twd
);
19029 tcg_temp_free_i32(tws
);
19030 tcg_temp_free_i32(twt
);
19031 tcg_temp_free_i32(tdf
);
19034 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
19036 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
19037 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19038 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19039 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19040 TCGv_i32 twd
= tcg_const_i32(wd
);
19041 TCGv_i32 tws
= tcg_const_i32(ws
);
19042 TCGv_i32 twt
= tcg_const_i32(wt
);
19044 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19046 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
19049 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
19052 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
19055 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
19058 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
19061 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
19064 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
19067 MIPS_INVAL("MSA instruction");
19068 generate_exception_end(ctx
, EXCP_RI
);
19072 tcg_temp_free_i32(twd
);
19073 tcg_temp_free_i32(tws
);
19074 tcg_temp_free_i32(twt
);
19077 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
19079 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19087 gen_msa_vec_v(env
, ctx
);
19090 gen_msa_2r(env
, ctx
);
19093 gen_msa_2rf(env
, ctx
);
19096 MIPS_INVAL("MSA instruction");
19097 generate_exception_end(ctx
, EXCP_RI
);
19102 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
19104 uint32_t opcode
= ctx
->opcode
;
19105 check_insn(ctx
, ASE_MSA
);
19106 check_msa_access(ctx
);
19108 switch (MASK_MSA_MINOR(opcode
)) {
19109 case OPC_MSA_I8_00
:
19110 case OPC_MSA_I8_01
:
19111 case OPC_MSA_I8_02
:
19112 gen_msa_i8(env
, ctx
);
19114 case OPC_MSA_I5_06
:
19115 case OPC_MSA_I5_07
:
19116 gen_msa_i5(env
, ctx
);
19118 case OPC_MSA_BIT_09
:
19119 case OPC_MSA_BIT_0A
:
19120 gen_msa_bit(env
, ctx
);
19122 case OPC_MSA_3R_0D
:
19123 case OPC_MSA_3R_0E
:
19124 case OPC_MSA_3R_0F
:
19125 case OPC_MSA_3R_10
:
19126 case OPC_MSA_3R_11
:
19127 case OPC_MSA_3R_12
:
19128 case OPC_MSA_3R_13
:
19129 case OPC_MSA_3R_14
:
19130 case OPC_MSA_3R_15
:
19131 gen_msa_3r(env
, ctx
);
19134 gen_msa_elm(env
, ctx
);
19136 case OPC_MSA_3RF_1A
:
19137 case OPC_MSA_3RF_1B
:
19138 case OPC_MSA_3RF_1C
:
19139 gen_msa_3rf(env
, ctx
);
19142 gen_msa_vec(env
, ctx
);
19153 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
19154 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
19155 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19156 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
19158 TCGv_i32 twd
= tcg_const_i32(wd
);
19159 TCGv taddr
= tcg_temp_new();
19160 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
19162 switch (MASK_MSA_MINOR(opcode
)) {
19164 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
19167 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
19170 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
19173 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
19176 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
19179 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
19182 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
19185 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
19189 tcg_temp_free_i32(twd
);
19190 tcg_temp_free(taddr
);
19194 MIPS_INVAL("MSA instruction");
19195 generate_exception_end(ctx
, EXCP_RI
);
static void decode_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int32_t offset;
    int rs, rt, rd, sa;
    uint32_t op, op1, op2;
    int16_t imm;

    /* make sure instructions are on a word boundary */
    if (ctx->pc & 0x3) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_err(ctx, EXCP_AdEL, EXCP_INST_NOTAVAIL);
        return;
    }

    /* Handle branch-likely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        TCGLabel *l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->pc + 4);
        gen_set_label(l1);
    }

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
    switch (op) {
    case OPC_SPECIAL:
        decode_opc_special(env, ctx);
        break;
    case OPC_SPECIAL2:
        decode_opc_special2_legacy(env, ctx);
        break;
    case OPC_SPECIAL3:
        decode_opc_special3(env, ctx);
        break;
    case OPC_REGIMM:
        op1 = MASK_REGIMM(ctx->opcode);
        switch (op1) {
        case OPC_BLTZL: /* REGIMM branches */
        case OPC_BGEZL:
        case OPC_BLTZALL:
        case OPC_BGEZALL:
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            /* Fallthrough */
        case OPC_BLTZ:
        case OPC_BGEZ:
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            break;
        case OPC_BLTZAL:
        case OPC_BGEZAL:
            if (ctx->insn_flags & ISA_MIPS32R6) {
                if (rs == 0) {
                    /* OPC_NAL, OPC_BAL */
                    gen_compute_branch(ctx, op1, 4, 0, -1, imm << 2, 4);
                } else {
                    generate_exception_end(ctx, EXCP_RI);
                }
            } else {
                gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            }
            break;
        case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
        case OPC_TNEI:
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_trap(ctx, op1, rs, -1, imm);
            break;
        case OPC_SIGRIE:
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            break;
        case OPC_SYNCI:
            check_insn(ctx, ISA_MIPS32R2);
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
            break;
        case OPC_BPOSGE32: /* MIPS DSP branch */
#if defined(TARGET_MIPS64)
        case OPC_BPOSGE64:
#endif
            check_dsp(ctx);
            gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2, 4);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DAHI:
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            if (rs != 0) {
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 32);
            }
            break;
        case OPC_DATI:
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
            if (rs != 0) {
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 48);
            }
            break;
#endif
        default: /* Invalid */
            MIPS_INVAL("regimm");
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_CP0:
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
        switch (op1) {
        case OPC_MFC0:
        case OPC_MTC0:
        case OPC_MFTR:
        case OPC_MTTR:
        case OPC_MFHC0:
        case OPC_MTHC0:
#if defined(TARGET_MIPS64)
        case OPC_DMFC0:
        case OPC_DMTC0:
#endif
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_MFMC0:
#ifndef CONFIG_USER_ONLY
            {
                TCGv t0 = tcg_temp_new();

                op2 = MASK_MFMC0(ctx->opcode);
                switch (op2) {
                case OPC_DMT:
                    check_insn(ctx, ASE_MT);
                    gen_helper_dmt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EMT:
                    check_insn(ctx, ASE_MT);
                    gen_helper_emt(t0);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVPE:
                    check_insn(ctx, ASE_MT);
                    gen_helper_dvpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_EVPE:
                    check_insn(ctx, ASE_MT);
                    gen_helper_evpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    break;
                case OPC_DVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_dvp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_EVP:
                    check_insn(ctx, ISA_MIPS32R6);
                    if (ctx->vp) {
                        gen_helper_evp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    }
                    break;
                case OPC_DI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_di(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode. */
                    ctx->bstate = BS_STOP;
                    break;
                case OPC_EI:
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_ei(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode. */
                    ctx->bstate = BS_STOP;
                    break;
                default: /* Invalid */
                    MIPS_INVAL("mfmc0");
                    generate_exception_end(ctx, EXCP_RI);
                    break;
                }
                tcg_temp_free(t0);
            }
#endif /* !CONFIG_USER_ONLY */
            break;
        case OPC_RDPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            break;
        case OPC_WRPGPR:
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_ADDI */
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_ADDIU:
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_SLTI: /* Set on less than with immediate opcode */
    case OPC_SLTIU:
        gen_slt_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
    case OPC_ORI:
    case OPC_XORI:
        gen_logic_imm(ctx, op, rt, rs, imm);
        break;
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        break;
    /* Branch */
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BLEZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rt == 0) {
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_BGTZL */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        }
        break;
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
        if (rt == 0) {
            /* OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
        if (rt == 0) {
            /* OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        } else {
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        }
        break;
    case OPC_BEQL:
    case OPC_BNEL:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_BEQ:
    case OPC_BNE:
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
        break;
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        /* Fallthrough */
    case OPC_LWL:
    case OPC_LWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SWL:
    case OPC_SWR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_SB ... OPC_SH:
    case OPC_SW:
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SC:
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_CACHE:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        }
        /* Treat as NOP. */
        break;
    case OPC_PREF:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */
        break;

    /* Floating point (COP1). */
    case OPC_LWC1:
    case OPC_LDC1:
    case OPC_SWC1:
    case OPC_SDC1:
        gen_cop1_ldst(ctx, op, rt, rs, imm);
        break;
    case OPC_CP1:
        op1 = MASK_CP1(ctx->opcode);

        switch (op1) {
        case OPC_MFHC1:
        case OPC_MTHC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
            /* Fallthrough */
        case OPC_MFC1:
        case OPC_CFC1:
        case OPC_MTC1:
        case OPC_CTC1:
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#if defined(TARGET_MIPS64)
        case OPC_DMFC1:
        case OPC_DMTC1:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
            break;
#endif
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_BC1EQZ */
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                       rt, imm << 2, 4);
            } else {
                /* OPC_BC1ANY2 */
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            }
            break;
        case OPC_BC1NEZ:
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                                   rt, imm << 2, 4);
            break;
        case OPC_BC1ANY4:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_insn(ctx, ASE_MIPS3D);
            /* Fallthrough */
        case OPC_BC1:
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            break;
        case OPC_S_FMT:
        case OPC_D_FMT:
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                       (imm >> 8) & 0x7);
            break;
        case OPC_W_FMT:
        case OPC_L_FMT:
        {
            int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                switch (r6_op) {
                case R6_OPC_CMP_AF_S:
                case R6_OPC_CMP_UN_S:
                case R6_OPC_CMP_EQ_S:
                case R6_OPC_CMP_UEQ_S:
                case R6_OPC_CMP_LT_S:
                case R6_OPC_CMP_ULT_S:
                case R6_OPC_CMP_LE_S:
                case R6_OPC_CMP_ULE_S:
                case R6_OPC_CMP_SAF_S:
                case R6_OPC_CMP_SUN_S:
                case R6_OPC_CMP_SEQ_S:
                case R6_OPC_CMP_SEUQ_S:
                case R6_OPC_CMP_SLT_S:
                case R6_OPC_CMP_SULT_S:
                case R6_OPC_CMP_SLE_S:
                case R6_OPC_CMP_SULE_S:
                case R6_OPC_CMP_OR_S:
                case R6_OPC_CMP_UNE_S:
                case R6_OPC_CMP_NE_S:
                case R6_OPC_CMP_SOR_S:
                case R6_OPC_CMP_SUNE_S:
                case R6_OPC_CMP_SNE_S:
                    gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    break;
                case R6_OPC_CMP_AF_D:
                case R6_OPC_CMP_UN_D:
                case R6_OPC_CMP_EQ_D:
                case R6_OPC_CMP_UEQ_D:
                case R6_OPC_CMP_LT_D:
                case R6_OPC_CMP_ULT_D:
                case R6_OPC_CMP_LE_D:
                case R6_OPC_CMP_ULE_D:
                case R6_OPC_CMP_SAF_D:
                case R6_OPC_CMP_SUN_D:
                case R6_OPC_CMP_SEQ_D:
                case R6_OPC_CMP_SEUQ_D:
                case R6_OPC_CMP_SLT_D:
                case R6_OPC_CMP_SULT_D:
                case R6_OPC_CMP_SLE_D:
                case R6_OPC_CMP_SULE_D:
                case R6_OPC_CMP_OR_D:
                case R6_OPC_CMP_UNE_D:
                case R6_OPC_CMP_NE_D:
                case R6_OPC_CMP_SOR_D:
                case R6_OPC_CMP_SUNE_D:
                case R6_OPC_CMP_SNE_D:
                    gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    break;
                default:
                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                               rt, rd, sa, (imm >> 8) & 0x7);
                    break;
                }
            } else {
                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                           (imm >> 8) & 0x7);
            }
            break;
        }
        case OPC_BZ_V:
        case OPC_BNZ_V:
        case OPC_BZ_B:
        case OPC_BZ_H:
        case OPC_BZ_W:
        case OPC_BZ_D:
        case OPC_BNZ_B:
        case OPC_BNZ_H:
        case OPC_BNZ_W:
        case OPC_BNZ_D:
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            break;
        default:
            generate_exception_end(ctx, EXCP_RI);
            break;
        }
        break;
    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            if (rs != 0) {
                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
            } else {
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);
            }
        } else {
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        }
        break;
    case OPC_CP2:
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        break;
    case OPC_CP3:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
            switch (op1) {
            case OPC_LUXC1:
            case OPC_SUXC1:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_LWXC1:
            case OPC_LDXC1:
            case OPC_SWXC1:
            case OPC_SDXC1:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                break;
            case OPC_PREFX:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                break;
            case OPC_ALNV_PS:
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                /* Fallthrough */
            case OPC_MADD_S:
            case OPC_MADD_D:
            case OPC_MADD_PS:
            case OPC_MSUB_S:
            case OPC_MSUB_D:
            case OPC_MSUB_PS:
            case OPC_NMADD_S:
            case OPC_NMADD_D:
            case OPC_NMADD_PS:
            case OPC_NMSUB_S:
            case OPC_NMSUB_D:
            case OPC_NMSUB_PS:
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                break;
            default:
                generate_exception_end(ctx, EXCP_RI);
                break;
            }
        } else {
            generate_exception_err(ctx, EXCP_CpU, 1);
        }
        break;
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
    case OPC_LLD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_LWU:
    case OPC_LD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
        break;
    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        /* Fallthrough */
    case OPC_SD:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        break;
    case OPC_SCD:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
        break;
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            /* OPC_DADDI */
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        }
        break;
    case OPC_DADDIU:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
        break;
#else
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        } else {
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
        }
        break;
#endif
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
            /* OPC_DAUI */
            check_mips_64(ctx);
            if (rs == 0) {
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
                tcg_temp_free(t0);
            }
#else
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
#endif
        } else {
            /* OPC_JALX */
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
        }
        break;
    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */
        gen_msa(env, ctx);
        break;
    case OPC_PCREL:
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);
        break;
    default: /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
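/*
 * Illustrative sketch (not part of the original translator): how the
 * major-opcode fields extracted at the top of decode_opc() are carved out
 * of a 32-bit MIPS instruction word.  The field positions mirror the
 * shifts and masks above; the function name is invented for this example.
 */
static inline void example_decode_fields(uint32_t insn,
                                         int *rs, int *rt, int *rd,
                                         int *sa, int16_t *imm)
{
    *rs = (insn >> 21) & 0x1f;   /* bits 25..21 */
    *rt = (insn >> 16) & 0x1f;   /* bits 20..16 */
    *rd = (insn >> 11) & 0x1f;   /* bits 15..11 */
    *sa = (insn >> 6) & 0x1f;    /* bits 10..6  */
    *imm = (int16_t)insn;        /* low 16 bits, sign-extended by the cast */
}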
void gen_intermediate_code(CPUMIPSState *env, struct TranslationBlock *tb)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    DisasContext ctx;
    target_ulong pc_start;
    target_ulong next_page_start;
    int num_insns;
    int max_insns;
    int insn_bytes;
    int is_slot;

    pc_start = tb->pc;
    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    ctx.pc = pc_start;
    ctx.saved_pc = -1;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;
    ctx.tb = tb;
    ctx.bstate = BS_NONE;
    ctx.btarget = 0;
    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx.cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx.vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx.mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx.nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx.abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
#else
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
#endif
    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    gen_tb_start(tb);
    while (ctx.bstate == BS_NONE) {
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);
        num_insns++;

        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing. */
            ctx.pc += 4;
            goto done_generating;
        }

        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            insn_bytes = 4;
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
        } else {
            generate_exception_end(&ctx, EXCP_RI);
            break;
        }

        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* Force to generate branch as there is neither delay nor
                   forbidden slot. */
                is_slot = 1;
            }
            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */
                is_slot = 1;
            }
        }
        if (is_slot) {
            gen_branch(&ctx, insn_bytes);
        }
        ctx.pc += insn_bytes;

        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {
            break;
        }

        if (ctx.pc >= next_page_start) {
            break;
        }

        if (tcg_op_buf_full()) {
            break;
        }

        if (num_insns >= max_insns)
            break;

        if (singlestep)
            break;
    }
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_NONE:
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            tcg_gen_exit_tb(0);
            break;
        case BS_BRANCH:
        default:
            break;
        }
    }
done_generating:
    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
    }
}
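/*
 * Illustrative sketch (not part of the original translator): the
 * page-boundary bound used by the translation loop above.  Once ctx.pc
 * reaches the first byte of the next target page the loop stops, so no
 * instruction is fetched from the following page.  The constants below
 * stand in for TARGET_PAGE_SIZE/TARGET_PAGE_MASK and are assumptions for
 * this example only (a 4 KiB target page).
 */
static inline uint64_t example_next_page_start(uint64_t pc_start)
{
    const uint64_t example_page_size = 4096;
    const uint64_t example_page_mask = ~(example_page_size - 1);

    return (pc_start & example_page_mask) + example_page_size;
}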
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
                           int flags)
{
    int i;
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
    do {                                                                \
        if (is_fpu64)                                                   \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu: %13g\n",                 \
                        (fp)->w[FP_ENDIAN_IDX], (fp)->d,                \
                        (double)(fp)->fd,                               \
                        (double)(fp)->fs[FP_ENDIAN_IDX],                \
                        (double)(fp)->fs[!FP_ENDIAN_IDX]);              \
        else {                                                          \
            fpr_t tmp;                                                  \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];              \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];       \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64                       \
                        " fd:%13g fs:%13g psu:%13g\n",                  \
                        tmp.w[FP_ENDIAN_IDX], tmp.d,                    \
                        (double)tmp.fd,                                 \
                        (double)tmp.fs[FP_ENDIAN_IDX],                  \
                        (double)tmp.fs[!FP_ENDIAN_IDX]);                \
        }                                                               \
    } while (0)

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#undef printfpr
}
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                         int flags)
{
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    int i;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0) {
            cpu_fprintf(f, "GPR%02d:", i);
        }
        cpu_fprintf(f, " %s " TARGET_FMT_lx,
                    regnames[i], env->active_tc.gpr[i]);
        if ((i & 3) == 3) {
            cpu_fprintf(f, "\n");
        }
    }

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                PRIx64 "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
}
void mips_tcg_init(void)
{
    int i;

    /* Initialize various static tables. */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    tcg_ctx.tcg_env = cpu_env;

    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
                                        regnames[i]);

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        msa_wr_d[i * 2] =
                tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped onto
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
                tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
    }

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),
                                       "fcr31");
}
#include "translate_init.c"
MIPSCPU *cpu_mips_init(const char *cpu_model)
{
    MIPSCPU *cpu;
    CPUMIPSState *env;
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    if (!def)
        return NULL;
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env = &cpu->env;
    env->cpu_model = def;
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
#endif
    fpu_init(env, def);
    mvp_init(env, def);

    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);

    return cpu;
}
bool cpu_supports_cps_smp(const char *cpu_model)
{
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);
    if (!def) {
        return false;
    }

    return (def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
}
void cpu_set_exception_base(int vp_index, target_ulong address)
{
    MIPSCPU *vp = MIPS_CPU(qemu_get_cpu(vp_index));
    vp->env.exception_base = address;
}
void cpu_state_reset(CPUMIPSState *env)
{
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
#endif
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    }
#endif
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31_rw_bitmask = env->cpu_model->CP1_fcr31_rw_bitmask;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# endif
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
# endif
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers. */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    }
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
    }
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    }
# endif
#else
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
    } else {
        env->CP0_ErrorEPC = env->active_tc.PC;
    }
    env->active_tc.PC = env->exception_base;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (kvm_enabled()) {
        env->CP0_EBase |= 0x40000000;
    } else {
        env->CP0_EBase |= 0x80000000;
    }
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    }
    env->CP0_EntryHi_ASID_mask = (env->CP0_Config4 & (1 << CP0C4_AE)) ?
                                 0x3ff : 0xff;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    {
        int i;

        for (i = 0; i < 7; i++) {
            env->CP0_WatchLo[i] = 0;
            env->CP0_WatchHi[i] = 0x80000000;
        }
        env->CP0_WatchLo[7] = 0;
        env->CP0_WatchHi[7] = 0;
    }
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);

    cpu_mips_store_count(env, 1);

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        int i;

        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;
        cs->halted = 1;

        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            cs->halted = 0;
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }

    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    }
#endif
    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
        msa_reset(env);
    }

    compute_hflags(env);
    restore_fp_status(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;

    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
    }
}
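/*
 * Illustrative sketch (not part of the original code): the SEGMask value
 * computed in cpu_state_reset() above is a mask of SEGBITS low address
 * bits; on 64-bit CPUs implementing MIPS III and later the two top address
 * bits (the 64-bit segment-selector bits) are kept as well.  The helper
 * name and parameters below are invented for this example only.
 */
static inline uint64_t example_seg_mask(unsigned segbits, bool is_mips3_64bit)
{
    uint64_t mask = (1ULL << segbits) - 1;   /* low SEGBITS bits */

    if (is_mips3_64bit) {
        mask |= 3ULL << 62;                  /* also keep bits 63..62 */
    }
    return mask;
}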
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        env->btarget = data[2];
        break;
    }
}